refereedconferencepapers.bib

@comment{{This file has been generated by bib2bib 1.99}}
@comment{{Command line: bib2bib -ob ./publications/refereedconferencepapers.bib -c 'keywords : ".*conference-paper.*" and keywords : ".*refereed.*" and not keywords : ".*non-refereed.*"' ../publications.bib}}
@inproceedings{Naess:2019aa,
  address = {Porto Alegre, Brazil},
  author = {Torgrim R. N{\ae}ss and Charles P. Martin},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-added = {2019-06-10 13:02:03 +1000},
  date-modified = {2019-06-10 13:13:31 +1000},
  keywords = {refereed, conference-paper, artificial-intelligence, music},
  month = jun,
  preprint = {http://folk.uio.no/charlepm/preprints/2019-NIME-PhysicalIntelligentInstrument.pdf},
  series = {NIME '19},
  title = {A Physical Intelligent Instrument using Recurrent Neural Networks},
  year = {2019}
}
@inproceedings{Faitas:2019aa,
  address = {Porto Alegre, Brazil},
  author = {Andrei Faitas and Synne Baumann and Torgrim R. N{\ae}ss and Jim Torresen and Charles P. Martin},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-added = {2019-06-10 12:59:32 +1000},
  date-modified = {2019-06-10 13:13:57 +1000},
  keywords = {refereed, conference-paper, artificial-intelligence, music},
  month = jun,
  preprint = {http://folk.uio.no/charlepm/preprints/2019-NIME-GeneratingHarmonyBLSTM.pdf},
  series = {NIME '19},
  title = {Generating Convincing Harmony Parts with Bidirectional Long Short-Term Memory Networks},
  year = {2019}
}
@inproceedings{Martin:2019aa,
  archiveprefix = {arXiv},
  author = {Charles P. Martin and Jim Torresen},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-added = {2019-01-30 21:23:28 +0100},
  date-modified = {2019-04-12 15:50:01 +1000},
  eprint = {1904.05009},
  keywords = {refereed, conference-paper, artificial-intelligence, music},
  month = jun,
  note = {To appear at NIME '19},
  preprint = {http://folk.uio.no/charlepm/preprints/2019-InteractiveMusicPredictionSystem.pdf},
  series = {NIME '19},
  title = {An Interactive Musical Prediction System with Mixture Density Recurrent Neural Networks},
  video = {https://youtu.be/Kdmhrp2dfHw},
  year = {2019}
}
@inproceedings{Nygaard:2019aa,
  address = {Cham},
  author = {Nygaard, T{\o}nnes F. and Martin, Charles P. and Torresen, Jim and Glette, Kyrre},
  booktitle = {International Conference on the Applications of Evolutionary Computation},
  date-added = {2019-01-30 16:21:02 +0100},
  date-modified = {2019-04-12 15:43:09 +1000},
  doi = {10.1007/978-3-030-16692-2_41},
  editor = {Kaufmann, Paul and Castillo, Pedro A.},
  keywords = {conference-paper, refereed, artificial-intelligence},
  month = apr,
  note = {Presented at EvoApplications '19},
  pages = {616--632},
  preprint = {http://folk.uio.no/charlepm/preprints/2019-EvolvingRobotsOnEasyMode.pdf},
  publisher = {Springer International Publishing},
  title = {Evolving Robots on Easy Mode: Towards a Variable Complexity Controller for Quadrupeds},
  year = {2019},
  bdsk-url-1 = {https://doi.org/10.1007/978-3-030-16692-2_41}
}
@inproceedings{Wallace:2019aa,
  address = {Cham},
  author = {Wallace, Benedikte and Martin, Charles P.},
  booktitle = {International Conference on Computational Intelligence in Music, Sound, Art and Design},
  date-added = {2019-01-30 15:57:22 +0100},
  date-modified = {2019-04-12 15:41:20 +1000},
  doi = {10.1007/978-3-030-16667-0_12},
  editor = {Ek{\'a}rt, Anik{\'o} and Liapis, Antonios and Castro Pena, Mar{\'\i}a Luz},
  keywords = {conference-paper, refereed, artificial-intelligence},
  month = apr,
  note = {Presented at EvoMUSART '19},
  pages = {173--187},
  preprint = {http://folk.uio.no/charlepm/preprints/2019-ComparingModelsForHarmonyPrediction.pdf},
  publisher = {Springer International Publishing},
  title = {Comparing Models for Harmony Prediction in an Interactive Audio Looper},
  year = {2019},
  bdsk-url-1 = {https://doi.org/10.1007/978-3-030-16667-0_12}
}
@inproceedings{Martin:2018ai,
  address = {Montr{\'e}al, Canada},
  author = {Charles P. Martin and Jim Torresen},
  booktitle = {NeurIPS 2018 Workshop on Machine Learning for Creativity and Design},
  date-added = {2018-12-04 15:39:37 +0100},
  date-modified = {2019-02-07 10:15:47 +0100},
  doi = {10.5281/zenodo.2558826},
  keywords = {conference-paper, workshop-paper, refereed},
  month = dec,
  preprint = {http://folk.uio.no/charlepm/preprints/2018-predictive-musical-interaction-with-MDRNNs.pdf},
  title = {Predictive Musical Interaction with MDRNNs},
  url = {https://nips2018creativity.github.io/doc/Predictive_Musical_Interaction_with_MDRNNs.pdf},
  year = {2018},
  bdsk-url-1 = {https://nips2018creativity.github.io/doc/Predictive_Musical_Interaction_with_MDRNNs.pdf}
}
@inproceedings{Nygaard:2018ab,
  archiveprefix = {arXiv},
  author = {T{\o}nnes F. Nygaard and Charles P. Martin and Jim Torresen and Kyrre Glette},
  booktitle = {Proceedings of the {IEEE} International Conference on Robotics and Automation ({ICRA})},
  date-added = {2018-02-28 21:29:41 +0000},
  date-modified = {2019-01-31 10:13:03 +0100},
  eprint = {1803.05629},
  keywords = {refereed, conference-paper, artificial-intelligence},
  month = may,
  note = {To appear at ICRA '19},
  preprint = {http://folk.uio.no/charlepm/preprints/2018-SelfModifyingMorphology.pdf},
  title = {Self-Modifying Morphology Experiments with {DyRET}: Dynamic Robot for Embodied Testing},
  url = {https://arxiv.org/abs/1803.05629},
  video = {https://youtu.be/DLVc0JOWaUM},
  year = {2019},
  bdsk-url-1 = {http://folk.uio.no/charlepm/preprints/2018-SelfModifyingMorphology.pdf}
}
@inproceedings{Martin:2014aa,
  address = {Toronto, Canada},
  author = {Charles Martin and Henry Gardner},
  booktitle = {Proceedings of the {CHI} 2014 Workshop on Curating the Digital: Spaces for Art and Interaction},
  date-added = {2018-02-18 21:01:46 +0000},
  date-modified = {2019-01-08 16:41:52 +0100},
  doi = {10.5281/zenodo.1175599},
  keywords = {conference-paper, workshop-paper, refereed},
  month = apr,
  preprint = {http://folk.uio.no/charlepm/preprints/2014-CHI-CuratingDigital-PreservingMusicalPerformance.pdf},
  title = {Preserving Musical Performance on Touch-Screens},
  url = {http://folk.uio.no/charlepm/preprints/2014-CHI-CuratingDigital-PreservingMusicalPerformance.pdf},
  year = {2014},
  bdsk-url-1 = {http://folk.uio.no/charlepm/preprints/2014-CHI-CuratingDigital-PreservingMusicalPerformance.pdf}
}
@inproceedings{Martin:2018ab,
  author = {Charles P. Martin and Alexander Refsum Jensenius and Jim Torresen},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-added = {2018-02-18 19:56:30 +0000},
  date-modified = {2019-02-07 09:56:19 +0100},
  doi = {10.5281/zenodo.1302543},
  keywords = {conference-paper, refereed},
  month = jun,
  pages = {196--197},
  preprint = {http://folk.uio.no/charlepm/preprints/2018-ComposingEnsembleStandstillWork.pdf},
  series = {NIME '18},
  title = {Composing an Ensemble Standstill Work for Myo and Bela},
  url = {http://folk.uio.no/charlepm/preprints/2018-ComposingEnsembleStandstillWork.pdf},
  year = {2018},
  bdsk-url-1 = {http://folk.uio.no/charlepm/preprints/2018-ComposingEnsembleStandstillWork.pdf}
}
@inproceedings{Sanchez:2018aa,
  author = {Gonzalez Sanchez, Victor Evaristo and Agata Zelechowska and Charles P. Martin and Victoria Johnson and Kari Anne Vadstensvik Bjerkestrand and Alexander Refsum Jensenius},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-added = {2018-02-18 19:54:36 +0000},
  date-modified = {2019-02-07 09:57:17 +0100},
  doi = {10.5281/zenodo.1302599},
  keywords = {conference-paper, refereed},
  month = jun,
  preprint = {http://folk.uio.no/charlepm/preprints/2018-BelaBasedAugmentedGuitars.pdf},
  series = {NIME '18},
  title = {Bela-Based Augmented Acoustic Guitars for Inverse Sonic Microinteraction},
  url = {http://folk.uio.no/charlepm/preprints/2018-BelaBasedAugmentedGuitars.pdf},
  year = {2018},
  bdsk-url-1 = {http://folk.uio.no/charlepm/preprints/2018-BelaBasedAugmentedGuitars.pdf}
}
@inproceedings{Nygaard:2018aa,
  address = {New York, NY, USA},
  author = {T{\o}nnes F. Nygaard and Charles P. Martin and Eivind Samuelsen and Jim Torresen and Kyrre Glette},
  booktitle = {Proceedings of the Genetic and Evolutionary Computation Conference},
  date-added = {2018-02-18 19:51:47 +0000},
  date-modified = {2018-09-10 23:02:18 +0200},
  doi = {10.1145/3205455.3205567},
  keywords = {conference-paper, refereed},
  month = jul,
  pages = {125--132},
  preprint = {http://folk.uio.no/charlepm/preprints/2018-RealWorldAdaptation.pdf},
  publisher = {{ACM}},
  series = {GECCO '18},
  title = {Real-World Evolution Adapts Robot Morphology and Control to Hardware Limitations},
  url = {http://folk.uio.no/charlepm/preprints/2018-RealWorldAdaptation.pdf},
  video = {https://youtu.be/7r3cV6RuFJU},
  year = {2018},
  bdsk-url-1 = {http://folk.uio.no/charlepm/preprints/2018-RealWorldAdaptation.pdf}
}
@inproceedings{Martin:2018ag,
  address = {Switzerland},
  archiveprefix = {arXiv},
  author = {Charles P. Martin and Jim Torresen},
  booktitle = {Computational Intelligence in Music, Sound, Art and Design: International Conference, {EvoMUSART}},
  date-modified = {2019-02-07 09:59:30 +0100},
  doi = {10.1007/978-3-319-77583-8_11},
  editor = {Liapis, Antonios and Romero Cardalda, Juan Jes{\'u}s and Ek{\'a}rt, Anik{\'o}},
  eprint = {1711.10746},
  keywords = {conference-paper, refereed},
  month = apr,
  pages = {161--176},
  publisher = {Springer International Publishing},
  series = {Lecture Notes in Computer Science},
  title = {{RoboJam}: A Musical Mixture Density Network for Collaborative Touchscreen Interaction},
  url = {http://arxiv.org/abs/1711.10746},
  video = {https://youtu.be/rQbg1AAnWb8},
  volume = {10783},
  year = {2018},
  bdsk-url-1 = {http://arxiv.org/abs/1711.10746}
}
@inproceedings{Martin:2010dk,
  abstract = {In 2009 the cross artform group, Last Man to Die,
                  presenteda series of performances using new
                  interfaces and networkedperformance to integrate the
                  three artforms of its members(actor, Hanna Cormick,
                  visual artist, Benjamin Forster andpercussionist,
                  Charles Martin). This paper explains ourartistic
                  motivations and design for a computer vision
                  surfaceand networked heartbeat sensor as well as the
                  experience ofmounting our first major work, Vital
                  LMTD.},
  address = {Sydney, Australia},
  author = {Martin, Charles and Forster, Benjamin and Cormick, Hanna},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-modified = {2018-04-25 14:20:24 +0000},
  doi = {10.5281/zenodo.1177843},
  editor = {Beilharz, Kirsty and Bongers, Bert and Johnston, Andrew and Ferguson, Sam},
  keywords = {conference-paper, refereed},
  month = jun,
  pages = {204--207},
  preprint = {http://folk.uio.no/charlepm/preprints/2010-NIME-CrossArtforPerformance.pdf},
  title = {Cross-Artform Performance Using Networked Interfaces: {L}ast {M}an to {D}ie's {V}ital {LMTD}},
  url = {http://www.nime.org/proceedings/2010/nime2010_204.pdf},
  video = {https://vimeo.com/46359018},
  year = {2010},
  bdsk-url-1 = {http://www.nime.org/proceedings/2010/nime2010_204.pdf}
}
@inproceedings{Martin:2011oz,
  abstract = { This paper describes Strike on Stage, an interface
                  and cor- responding audio-visual performance work
                  developed and performed in 2010 by percussionists
                  and media artists Chi-Hsia Lai and Charles Martin.
                  The concept of Strike on Stage is to integrate
                  computer visuals and sound into an improvised
                  percussion performance. A large projection surface
                  is positioned directly behind the performers, while
                  a computer vision system tracks their movements. The
                  setup allows computer visualisation and sonification
                  to be directly responsive and unified with the
                  performers' gestures.},
  address = {Oslo, Norway},
  author = {Martin, Charles and Lai, Chi-Hsia},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-modified = {2018-04-25 14:19:51 +0000},
  doi = {10.5281/zenodo.1178103},
  editor = {Jensenius, Alexander R. and Tveit, Anders and Godoy, Rolf I. and Overholt, Dan},
  keywords = {conference-paper, refereed},
  month = may,
  pages = {142--143},
  preprint = {http://folk.uio.no/charlepm/preprints/2011-NIME-StrikeOnStage.pdf},
  title = {{Strike on Stage}: a Percussion and Media Performance},
  url = {http://www.nime.org/proceedings/2011/nime2011_142.pdf},
  video = {https://youtu.be/2TnOI8Ac3PY},
  year = {2011},
  bdsk-url-1 = {http://www.nime.org/proceedings/2011/nime2011_142.pdf}
}
@inproceedings{Martin:2013,
  abstract = {This paper describes the development of an Apple
                  iPhone based mobile computersystem for vibraphone
                  and its use in a series of the author's
                  performanceprojects in 2011 and 2012.This artistic
                  research was motivated by a desire to develop an
                  alternative tolaptop computers for the author's
                  existing percussion and computer
                  performancepractice. The aims were to develop a
                  light, compact and flexible system usingmobile
                  devices that would allow computer music to
                  infiltrate solo and ensembleperformance situations
                  where it is difficult to use a laptop computer.The
                  project began with a system that brought computer
                  elements to NordligVinter, a suite of percussion
                  duos, using an iPhone, RjDj, Pure Data and
                  ahome-made pickup system. This process was
                  documented with video recordings andanalysed using
                  ethnographic methods.The mobile computer music setup
                  proved to be elegant and convenient inperformance
                  situations with very little time and space to set
                  up, as well as inperformance classes and workshops.
                  The simple mobile system encouragedexperimentation
                  and the platforms used enabled sharing with a wider
                  audience.},
  address = {Daejeon, Republic of Korea},
  author = {Charles Martin},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-modified = {2018-04-25 14:20:54 +0000},
  doi = {10.5281/zenodo.1178602},
  editor = {Yeo, W. and Lee, K. and Sigman, A. and Ji, H. and Wakefield, G.},
  keywords = {conference-paper, refereed},
  month = may,
  pages = {377--380},
  preprint = {http://folk.uio.no/charlepm/preprints/2013-NIME-MobileComputerSystem.pdf},
  publisher = {Graduate School of Culture Technology, KAIST},
  title = {Performing with a Mobile Computer System for Vibraphone},
  url = {http://nime.org/proceedings/2013/nime2013_121.pdf},
  year = {2013},
  bdsk-url-1 = {http://nime.org/proceedings/2013/nime2013_121.pdf}
}
@inproceedings{Martin:2013fk,
  abstract = {This paper describes a series of musical works
                  designed to integrate mobile computer instruments
                  into a percussion ensemble performance practice. The
                  works were motivated by the author's desire to
                  introduce computer music elements to non-programmer
                  members of the percussion group Ensemble Evolution.
                  Each of the works used simple setups with Apple iOS
                  devices in order to facilitate rehearsals and
                  increase the performers' engagement with the
                  computer elements of the works. This artistic
                  research considers the performance practices that
                  are enabled and demanded when complementing acoustic
                  percussion instruments with mobile music devices.
                  The first two works, 3p3p and Nordlig Vinter used
                  computer music elements composed in Pure Data
                  running on iPhones using RjDj in the context of
                  semi-composed works for percussion. The third work,
                  Snow Music was a collaboratively developed
                  improvised work using percussion and a native iOS
                  app developed with libpd. An ethnographic analysis
                  of the preparation and performance of the three
                  works shows a development in the role of mobile
                  devices in the performances and the emergence of
                  performance practices using both natural
                  interactions and control of generative processes. },
  address = {Perth, Australia},
  author = {Martin, Charles},
  booktitle = {Proceedings of the International Computer Music Conference},
  date-modified = {2018-02-21 16:16:58 +0000},
  keywords = {conference-paper, refereed},
  month = aug,
  pages = {437--440},
  preprint = {http://folk.uio.no/charlepm/preprints/2013-ICMC-IntegratingMobileMusic.pdf},
  title = {Integrating Mobile Music with Percussion Performance Practice},
  url = {http://hdl.handle.net/2027/spo.bbp2372.2013.073},
  year = {2013},
  bdsk-url-1 = {http://hdl.handle.net/2027/spo.bbp2372.2013.073}
}
@inproceedings{Martin:2014cr,
  acmid = 2557226,
  address = {New York, NY, USA},
  author = {Martin, Charles and Gardner, Henry and Swift, Ben},
  booktitle = {Proceedings of the {SIGCHI} Conference on Human Factors in Computing Systems},
  date-modified = {2018-02-22 20:42:50 +0000},
  doi = {10.1145/2556288.2557226},
  isbn = {978-1-4503-2473-1},
  keywords = {conference-paper, refereed},
  location = {Toronto, Ontario, Canada},
  month = apr,
  numpages = 4,
  pages = {1025--1028},
  preprint = {http://folk.uio.no/charlepm/preprints/2014-CHI-ExploringPercussiveGesture.pdf},
  publisher = {{ACM}},
  series = {{CHI} '14},
  title = {Exploring Percussive Gesture on i{P}ads with {E}nsemble {M}etatone},
  url = {http://doi.acm.org/10.1145/2556288.2557226},
  video = {https://youtu.be/NqdxCteYRFk},
  year = {2014},
  bdsk-url-1 = {http://doi.acm.org/10.1145/2556288.2557226},
  bdsk-url-2 = {https://dx.doi.org/10.1145/2556288.2557226}
}
@inproceedings{Martin:2014xp,
  acmid = 2574805,
  address = {New York, NY, USA},
  author = {Martin, Charles and Gardner, Henry and Swift, Ben},
  booktitle = {{CHI} '14 Extended Abstracts on Human Factors in Computing Systems},
  date-modified = {2018-02-21 21:10:06 +0000},
  doi = {10.1145/2559206.2574805},
  isbn = {978-1-4503-2474-8},
  keywords = {conference-paper, refereed, demonstration},
  location = {Toronto, Ontario, Canada},
  month = apr,
  numpages = 4,
  pages = {547--550},
  preprint = {http://folk.uio.no/charlepm/preprints/2014-CHI-MetaTravelsMetaLonsdale.pdf},
  publisher = {ACM},
  series = {{CHI} {EA} '14},
  title = {{M}eta{T}ravels and {M}eta{L}onsdale: {iPad} Apps for Percussive Improvisation},
  year = {2014},
  bdsk-url-1 = {https://dx.doi.org/10.1145/2559206.2574805}
}
@inproceedings{Martin:2015cr,
  abstract = {We present a study where a small group of experienced
                  iPad musicians evaluated a system of three musical
                  touch-screen apps and two server-based agents over
                  18 controlled improvisations. The performers'
                  perspectives were recorded through surveys,
                  interviews, and interaction data. Our agent
                  classifies the touch gestures of the performers and
                  identifies new sections in the improvisations while
                  a control agent returns similar messages sourced
                  from a statistical model. The three touch-screen
                  apps respond according to design paradigms of
                  reward, support, and disruption. In this study of an
                  ongoing musical practice, significant effects were
                  observed due to the apps' interfaces and how they
                  respond to agent interactions. The ``reward'' app
                  received the highest ratings. The results were used
                  to iterate the app designs for later performances.},
  author = {Charles Martin and Henry Gardner and Ben Swift and Michael Martin},
  booktitle = {Proceedings of the 2015 Conference of the {Australasian Computer Music Association}},
  date-modified = {2018-02-18 20:47:58 +0000},
  editor = {Jon Drummond and Donna Hewitt and Sophea Lerner and Ian Stevenson},
  keywords = {conference-paper, refereed},
  month = nov,
  numpages = 10,
  pages = {85--94},
  preprint = {http://folk.uio.no/charlepm/preprints/2015-ACMC-Music18Performances.pdf},
  publisher = {Australasian Computer Music Association},
  series = {ACMC2015 - MAKE!},
  title = {Music of 18 Performances: Evaluating Apps and Agents with Free Improvisation},
  url = {http://hdl.handle.net/1885/95205},
  year = {2015},
  bdsk-url-1 = {http://hdl.handle.net/1885/95205}
}
@inproceedings{Martin:2015jk,
  abstract = {We present and evaluate a novel interface for
                  tracking ensemble performances on touch-screens. The
                  system uses a Random Forest classifier to extract
                  touch-screen gestures and transition matrix
                  statistics. It analyses the resulting gesture-state
                  sequences across an ensemble of performers. A series
                  of specially designed iPad apps respond to this
                  real-time analysis of free-form gestural
                  performances with calculated modifications to their
                  musical interfaces. We describe our system and
                  evaluate it through cross-validation and profiling
                  as well as concert experience.},
  address = {Baton Rouge, Louisiana, USA},
  author = {Charles Martin and Henry Gardner and Ben Swift},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  date-modified = {2018-04-25 14:21:31 +0000},
  doi = {10.5281/zenodo.1179130},
  editor = {Edgar Berdahl and Jesse Allison},
  keywords = {conference-paper, refereed},
  month = may,
  pages = {359--364},
  preprint = {http://folk.uio.no/charlepm/preprints/2015-NIME-TrackingEnsemblePerformance.pdf},
  publisher = {Louisiana State University},
  title = {Tracking Ensemble Performance on Touch-Screens with Gesture Classification and Transition Matrices},
  url = {http://www.nime.org/proceedings/2015/nime2015_242.pdf},
  year = {2015},
  bdsk-url-1 = {http://www.nime.org/proceedings/2015/nime2015_242.pdf}
}
@inproceedings{Martin:2015mz,
  abstract = {The group experience of synchronisation is a key
                  aspect of ensemble musical performance. This paper
                  presents a number of strategies for syncing
                  performance information across networked
                  iPad-instruments to enable creativity among an
                  ensemble of improvising musicians. Acoustic
                  instrumentalists sync without mechanical
                  intervention. Electronic instruments frequently
                  synchronise rhythm using MIDI or OSC connections. In
                  contrast, our system syncs other aspects of
                  performance, such as tonality, instrument functions,
                  and gesture classifications, to support and enhance
                  improvised performance. Over a number of
                  performances with an iPad and percussion group,
                  Ensemble Metatone, various syncing scenarios have
                  been explored that support, extend, and disrupt
                  ensemble creativity.},
  address = {Brisbane, Australia},
  author = {Charles Martin and Henry Gardner},
  booktitle = {Proceedings of {C}reate{W}orld},
  date-modified = {2019-01-08 16:40:04 +0100},
  keywords = {conference-paper, refereed},
  month = feb,
  preprint = {http://folk.uio.no/charlepm/preprints/2015-CreateWorld-SynchingFeeling.pdf},
  publisher = {Griffith University},
  title = {That Syncing Feeling: Networked Strategies for Enabling Ensemble Creativity in i{P}ad Musicians},
  url = {http://hdl.handle.net/1885/95216},
  year = {2015},
  bdsk-url-1 = {http://hdl.handle.net/1885/95216}
}
@inproceedings{Martin:2016aa,
  author = {Charles Martin and Henry Gardner},
  booktitle = {Proceedings of the {CHI} Human-Centered Machine Learning Workshop},
  conference-url = {http://hcml2016.goldsmithsdigital.com},
  date-modified = {2018-02-21 21:16:27 +0000},
  doi = {10.5281/zenodo.56379},
  keywords = {conference-paper, workshop-paper, refereed},
  month = may,
  title = {Can Machine Learning Apply to Musical Ensembles?},
  url = {http://folk.uio.no/charlepm/preprints/2016-CHI-HCML-MachineLearningEnsembles.pdf},
  year = {2016},
  bdsk-url-1 = {https://dx.doi.org/10.5281/zenodo.56379}
}
@inproceedings{Martin:2016ab,
  author = {Charles Martin and Henry Gardner},
  booktitle = {Proceedings of the {CHI} Musical {HCI} Workshop},
  conference-url = {http://mcl.open.ac.uk/music-chi/},
  date-modified = {2018-02-21 21:12:52 +0000},
  doi = {10.5281/zenodo.56378},
  keywords = {conference-paper, workshop-paper, refereed},
  month = may,
  title = {Free-Improvised Rehearsal-as-Research for Musical {HCI}},
  url = {http://folk.uio.no/charlepm/preprints/2016-CHI-MusicHCI-RehearsalAsResearch.pdf},
  year = {2016},
  bdsk-url-1 = {https://dx.doi.org/10.5281/zenodo.56378}
}
@inproceedings{Martin:2016vn,
  address = {New York, NY, USA},
  author = {Charles Martin and Henry Gardner and Ben Swift and Michael Martin},
  booktitle = {Proceedings of the {SIGCHI} Conference on Human Factors in Computing Systems},
  date-modified = {2018-02-22 20:40:57 +0000},
  doi = {10.1145/2858036.2858269},
  keywords = {conference-paper, refereed},
  month = may,
  pages = {2295--2306},
  preprint = {http://folk.uio.no/charlepm/preprints/2016-CHI-intelligent-agents-networked-buttons.pdf},
  publisher = {ACM},
  series = {CHI '16},
  title = {Intelligent Agents and Networked Buttons Improve Free-Improvised Ensemble Music-Making on Touch-Screens},
  video = {https://youtu.be/lYz1DOfPmnY},
  year = {2016},
  bdsk-url-1 = {https://dx.doi.org/10.1145/2858036.2858269}
}
@inproceedings{Martin:2017ab,
  abstract = {MicroJam is a mobile app for sharing tiny touch-screen performances. Mobile applications that streamline creativity and social interaction have enabled a very broad audience to develop their own creative practices. While these apps have been very successful in visual arts (particularly photography), the idea of social music-making has not had such a broad impact. MicroJam includes several novel performance concepts intended to engage the casual music maker and inspired by current trends in social creativity support tools. Touch-screen performances are limited to five seconds, instrument settings are posed as sonic ``filters'', and past performances are arranged as a timeline with replies and layers. These features of MicroJam encourage users not only to perform music more frequently, but to engage with others in impromptu ensemble music making.},
  address = {Denmark},
  author = {Charles P. Martin and Jim Torresen},
  booktitle = {Proceedings of the International Conference on New Interfaces for Musical Expression},
  conference-url = {http://www.nime.org/proceedings/2017/nime2017_paper0096.pdf},
  date-modified = {2018-04-25 14:22:08 +0000},
  doi = {10.5281/zenodo.1176334},
  keywords = {conference-paper, refereed},
  month = may,
  pages = {495--496},
  preprint = {http://folk.uio.no/charlepm/preprints/2017-NIME-Microjam.pdf},
  publisher = {Aalborg University Copenhagen},
  series = {NIME '17},
  title = {MicroJam: An App for Sharing Tiny Touch-Screen Performances},
  url = {http://urn.nb.no/URN:NBN:no-58823},
  video = {https://youtu.be/SkUjjQd13KU},
  year = {2017},
  bdsk-url-1 = {http://urn.nb.no/URN:NBN:no-58823}
}
@inproceedings{Martin:2017ac,
  address = {Espoo, Finland},
  author = {Charles P. Martin and Jim Torresen},
  booktitle = {Proceedings of the 14th Sound and Music Computing Conference},
  date-modified = {2019-02-07 10:06:37 +0100},
  doi = {10.5281/zenodo.1401907},
  editor = {Tapio Lokki and Jukka P{\"a}tynen and Vesa V{\"a}lim{\"a}ki},
  keywords = {conference-paper, refereed},
  month = jul,
  pages = {175--180},
  preprint = {http://folk.uio.no/charlepm/preprints/2017-SMC-ExploringSocialMobileMusic.pdf},
  publisher = {Aalto University},
  series = {SMC '17},
  title = {Exploring Social Mobile Music with Tiny Touch-Screen Performances},
  url = {http://urn.nb.no/URN:NBN:no-60558},
  video = {https://youtu.be/SkUjjQd13KU},
  year = {2017},
  bdsk-url-1 = {http://smc2017.aalto.fi/media/materials/proceedings/SMC17_p175.pdf}
}
@inproceedings{Martin:2017ae,
  abstract = {For many, the pursuit and enjoyment of musical performance goes hand-in-hand with collaborative creativity, whether in a choir, jazz combo, orchestra, or rock band. However, few musical interfaces use the affordances of computers to create or enhance ensemble musical experiences. One possibility for such a system would be to use an artificial neural network (ANN) to model the way other musicians respond to a single performer. Some forms of music have well-understood rules for interaction; however, this is not the case for free improvisation with new touch-screen instruments where styles of interaction may be discovered in each new performance. This paper describes an ANN model of ensemble interactions trained on a corpus of such ensemble touch-screen improvisations. The results show realistic ensemble interactions and the model has been used to implement a live performance system where a performer is accompanied by the predicted and sonified touch gestures of three virtual players.},
  author = {Charles P. Martin and Kai Olav Ellefsen and Jim Torresen},
  booktitle = {Proceedings of the 12th International Audio Mostly Conference on Augmented and Participatory Sound and Music Experiences},
  date-modified = {2018-02-18 20:23:05 +0000},
  doi = {10.1145/3123514.3123556},
  keywords = {conference-paper, refereed, artificial-intelligence},
  month = aug,
  preprint = {http://folk.uio.no/charlepm/preprints/2017-AudioMostly-DeepModelsEnsmbleImprovisation.pdf},
  series = {AM '17},
  title = {Deep Models for Ensemble Touch-Screen Improvisation},
  year = {2017},
  bdsk-url-1 = {https://dx.doi.org/10.1145/3123514.3123556}
}