| 
  • If you are citizen of an European Union member nation, you may not use this service unless you are at least 16 years old.

  • Want to organize your cloud files? Sign up for a free webinar to see how Dokkio (a new product from PBworks) can help you find, organize, and collaborate on your Drive, Gmail, Dropbox, and Slack files: Weds, May 27 at 2PM Eastern / 11AM Pacific
View
 

CloudyVoices

Page history last edited by Hugo Solis 10 years ago

The goal of this project is to create an automatic generator of sonic scenes using as sources the a cappella files hosted at CCmixter. While the ReMIX API of EchoNest is very good for manipulating segments, it is not the optimal tool for sound manipulation and spectral transformations. For this reason I decided to use SuperCollider to do the transformations. The system works in the following way:

 

 

  • The user selects a word that is searched on the Tags of the cappella sounds of CCmixter. The word can be for example "sad", "love", "night", etc...
  • The MP3 file is downloaded to the desktop computer and converted to WAV
  • The MP3 file is uploaded to EchoNest for doing the sound analysis
  • The duration and start point of the analysis are used for generating the new sonic scene.
  • Using these parameters, SuperCollider mashes up the sound file and applies a couple of transformations. On each segment a random amount of FFT bins is cleared. In a future version the amount of erased bins will be determined by the Timbre value of the analysis
  • Once the new piece is finished it is converted again to an MP3 file. To do: upload this new work to SoundCloud.

 

SuperCollider is used for the sound transformations, but the downloading, uploading, and rules of composition are handled in Groovy, a scripting language for the JVM. For this reason the Java API of EchoNest is used, as well as the Java API of SoundCloud.

 

 

 

 

 

The entire code presented here

Scomber - The Saddest Story Pellas MASH by user1359161

 

import com.echonest.api.v3.EchoNestException

import com.echonest.api.v3.track.FloatWithConfidence

import com.echonest.api.v3.track.Metadata

import com.echonest.api.v3.track.TrackAPI

import com.echonest.api.v3.track.TrackAPI.AnalysisStatus

 

import de.sciss.jcollider.Server

import de.sciss.jcollider.gui.ServerPanel

 

 

/**
 * Queries CCmixter for a cappella MP3s tagged with `query` and returns
 * the download URL of one piece chosen at random.
 * Throws IllegalStateException when the query returns no data rows.
 */
def getOnePiece(query){
 println "querying the ccmixter with query $query"
 address = "http://ccmixter.org/api/query?tags=$query+acappella+mp3&t=links_dl&f=csv" as String
 // Stream the CSV response to a temp file; withStream/withOutputStream
 // close both ends even if the copy fails (the original leaked them).
 new URL(address).openStream().withStream { input ->
  new File("tmpFile.txt").withOutputStream { it << input }
 }
 urls = []
 // Column 5 of the CSV is the direct download link.
 new File("tmpFile.txt").eachLine { urls << it.tokenize(',')[5] }
 // First row is the CSV header; guard against an empty result set,
 // which would make the range slice below misbehave.
 if (urls.size() < 2) throw new IllegalStateException("no pieces found for tag '$query'")
 urls = urls[1..<urls.size()]
 myRandom = new Random()
 urls[myRandom.nextInt(urls.size())]
}

 

/**
 * Downloads `link` into the current directory, naming the file after
 * the last path segment of the URL.
 */
def downloadPiece(link){
 println "downloading piece"
 target = new File(link.tokenize("/")[-1])
 // withStream/withOutputStream close both streams even on failure;
 // the original left the URL input stream (and, on error, the output) open.
 new URL(link).openStream().withStream { input ->
  target.withOutputStream { it << input }
 }
 println "done with downloading"
}

 

/**
 * Decodes an MP3 file to WAV using the lame binary.
 * @param link path/name of the .mp3 file
 * @return the name of the generated .wav file (a String)
 */
def mp3toWav(link){
  println "converting to wav"
  // Replace only the FINAL extension. The original tokenize(".")[0]
  // truncated any filename containing extra dots ("a.b.mp3" -> "a").
  dot = link.lastIndexOf('.')
  base = dot > 0 ? link[0..<dot] : link
  newName = "${base}.wav" as String
  command = "/Applications/lame --decode $link $newName" as String
  def outStream = new ByteArrayOutputStream(4096)
  def errStream = new ByteArrayOutputStream(4096)
  Process p = command.execute()
  // Drain stdout/stderr concurrently so the child can't deadlock on a full pipe.
  p.consumeProcessOutput(outStream, errStream)
  int rc = p.waitFor()
  // The original silently ignored failures; surface them at least on the console.
  if (rc != 0) println "lame --decode exited with $rc: $errStream"
  return newName
}

 

 

/**
 * Encodes a WAV file to MP3 (VBR quality 2) using the lame binary.
 * @param link path/name of the .wav file
 * @return the name of the generated .mp3 file (a String)
 */
def wavtoMp3(link){
  // Replace only the FINAL extension. The original tokenize(".")[0]
  // truncated any filename containing extra dots ("a.b.wav" -> "a").
  dot = link.lastIndexOf('.')
  base = dot > 0 ? link[0..<dot] : link
  newName = "${base}.mp3" as String
  println "converting $link to $newName"
  command = "/Applications/lame -V2 $link $newName" as String
  def outStream = new ByteArrayOutputStream(4096)
  def errStream = new ByteArrayOutputStream(4096)
  Process p = command.execute()
  // Drain stdout/stderr concurrently so the child can't deadlock on a full pipe.
  p.consumeProcessOutput(outStream, errStream)
  int rc = p.waitFor()
  // The original silently ignored failures; surface them at least on the console.
  if (rc != 0) println "lame -V2 exited with $rc: $errStream"
  return newName
}

 

/**
 * Uploads the file at `link` to EchoNest and waits (up to 60s) for analysis.
 * @return [metadata, segments]; both elements are null when the analysis
 *         did not complete in time.
 */
def uploadAndAnalyze(link){
 TrackAPI trackAPI = new TrackAPI("YOUR KEY")
 println "uploading"
 String id = trackAPI.uploadTrack(new File(link), false)
 AnalysisStatus status = trackAPI.waitForAnalysis(id, 60000)
 // BUG FIX: metadata was never initialized, so a non-COMPLETE status made
 // the return line throw MissingPropertyException instead of [null, null].
 metadata = null
 segments = null
 if (status == AnalysisStatus.COMPLETE) {
     metadata = trackAPI.getMetadata(id);
     println "metadata: $metadata"
     segments = trackAPI.getSegments(id);
 }
 else println "status $status"
 return [metadata, segments]
}

 

/**
 * Writes raw.scd: a SuperCollider NRT score that plays every analyzed
 * segment of `buffer` through the \NRTsound synth.
 * @param buffer   name of the WAV file to load into buffer 0
 * @param data     [metadata, segments] pair from uploadAndAnalyze
 * @param basePath directory containing the WAV file (default keeps the
 *                 original hard-coded location, so existing callers work)
 */
def buildScore(buffer, data, basePath = "/Users/hugosg/"){
    metadata = data[0]
    segments = data[1]
    file = new File("raw.scd")
    file.write('') // truncate any previous score
    file << "[\n"
    file << """[0.1, [\\b_allocRead, 0, "$basePath$buffer"]],\n"""
    segments.eachWithIndex{seg, counter ->
         id = 1000 + counter
         // Buffer position is in frames: start time (s) * sample rate.
         pos = seg.getStart() * metadata.getSamplerate()
         loc = seg.getStart() + 1   // schedule 1s late so b_allocRead has finished
         dur = seg.getDuration()
         file << "[$loc, [\\s_new, \\NRTsound, $id, 0, 0, \\dur, $dur, \\pos, $pos]],\n"
    }
    // Close the score: free the first node one second past the track end.
    duration = metadata.getDuration() + 1
    file << "[$duration, [\\n_free, 1000]]\n"
    file << "]\n"
}

 

/**
 * Writes raw.scd: a SuperCollider NRT score that mashes up the first
 * (up to) 11 segments of `buffer` through the \NRTmash01 synth.
 * @param buffer   name of the WAV file to load into buffer 0
 * @param data     [metadata, segments] pair from uploadAndAnalyze
 * @param basePath directory containing the WAV file (default keeps the
 *                 original hard-coded location, so existing callers work)
 */
def buildScoreMash1(buffer, data, basePath = "/Users/hugosg/"){
    metadata = data[0]
    segments = data[1]
    file = new File("raw.scd")
    file.write('') // truncate any previous score
    file << "[\n"
    file << """[0.1, [\\b_allocReadChannel, 0, "$basePath$buffer", 0, -1, 0]],\n"""
    // take(11) matches the original [0..10] slice but doesn't throw
    // when the analysis produced fewer than 11 segments.
    segments.take(11).eachWithIndex{seg, counter ->
         id = 1000 + counter
         pos = seg.getStart()
         loc = seg.getStart() + 1   // schedule 1s late so the buffer read has finished
         dur = seg.getDuration()
         // Fixed trigger rate for now; TODO derive it from the segment's
         // timbre, e.g. Math.abs(seg.getTimbre()[0] * 10).
         trate = 100
         file << "[$loc, [\\s_new, \\NRTmash01, $id, 0, 0, \\dur, $dur, \\pos, $pos, \\trate, $trate]],\n"
    }
    // Close the score: free the first node one second past the track end.
    duration = metadata.getDuration() + 1
    file << "[$duration, [\\n_free, 1000]]\n"
    file << "]\n"
}

 

 

/**
 * Writes raw.scd: a SuperCollider NRT score that layers three mash synths
 * (\NRTmash02/03/04) per analyzed segment, using buffer 0 for the source
 * audio and buffer 1 (2048 frames) as FFT scratch space.
 * @param buffer   name of the WAV file to load into buffer 0
 * @param data     [metadata, segments] pair from uploadAndAnalyze
 * @param basePath directory containing the WAV file (default keeps the
 *                 original hard-coded location, so existing callers work)
 */
def buildScoreMash2(buffer, data, basePath = "/Users/hugosg/"){
    metadata = data[0]
    segments = data[1]
    file = new File("raw.scd")
    file.write('') // truncate any previous score
    file << "[\n"
    file << """[0.1, [\\b_allocReadChannel, 0, "$basePath$buffer", 0, -1, 0]],\n"""
    file << "[0.1, [\\b_alloc, 1,2048,1]],\n"
    segments.eachWithIndex{seg, counter ->
         id = 1000 + counter
         pos = seg.getStart()
         loc = seg.getStart() + 1   // schedule 1s late so the buffer read has finished
         dur = seg.getDuration()
         // Fixed trigger rate for now; TODO derive it from the segment's
         // pitches, e.g. Math.abs(seg.getPitches()[0] * 10).
         trate = 100
         file << "[$loc, [\\s_new, \\NRTmash02, $id, 0, 0, \\dur, $dur, \\pos, $pos, \\trate, $trate]],\n"
         file << "[$loc, [\\s_new, \\NRTmash03, $id, 0, 0, \\dur, $dur, \\pos, $pos, \\trate, $trate]],\n"
         file << "[$loc, [\\s_new, \\NRTmash04, $id, 0, 0, \\dur, $dur, \\pos, $pos, \\trate, $trate]],\n"
    }
    // BUG FIX: the original never terminated the score array, leaving
    // raw.scd with a trailing comma and no closing "]", which the other
    // two builders emit and which sclang needs to parse the file.
    duration = metadata.getDuration() + 1
    file << "[$duration, [\\n_free, 1000]]\n"
    file << "]\n"
}

 

// Launches the SuperCollider app; its startup file picks up raw.scd and
// renders it to the tmp.osc binary score, then we kill the process.
def getOSC(){
  def sclang = "/Applications/SuperCollider/SuperCollider.app/Contents/MacOS/SuperCollider".execute()
  sleep(5000)                  // give the startup script time to write tmp.osc
  sclang.waitForOrKill(1000)   // then shut SuperCollider down
}

 

/**
 * Renders the tmp.osc score offline with scsynth (non-realtime mode),
 * producing a stereo 44.1kHz 16-bit WAV named `link`.
 * @param link     output WAV file name
 * @param basePath directory holding tmp.osc and the output file (default
 *                 keeps the original hard-coded location, so existing
 *                 callers work)
 */
def runNRT(link, basePath = "/Users/hugosg/"){
  ant = new AntBuilder()
  ant.exec(outputproperty:"cmdOut",
             errorproperty: "cmdErr",
             resultproperty:"cmdExit",
             failonerror: "true",
             dir: "/Applications/SuperCollider/",
             resolveexecutable: "true",
             executable: "scsynth") {
               // -N = non-realtime: <osc-score> <in> <out> <rate> <fmt> <sample-fmt>
               arg(line:"-N ${basePath}tmp.osc _ $basePath$link 44100 WAV int16 -o 2")
             }
  // Hoist the property map instead of re-resolving it three times.
  def props = ant.project.properties
  println "return code:  ${props.cmdExit}"
  println "stderr:       ${props.cmdErr}"
  println "stdout:       ${props.cmdOut}"
  println "done with NRT"
}

 

// --- Main pipeline: pick a random "sad" a cappella from CCmixter,
// --- analyze it with EchoNest, mash it with SuperCollider, re-encode.
def pieceUrl = getOnePiece("sad")
downloadPiece(pieceUrl)
def mp3Name = pieceUrl.tokenize("/")[-1]            // local filename from the URL
def wavName2 = mp3toWav(mp3Name) as String          // decoded source for SuperCollider
def analysis = uploadAndAnalyze(mp3Name)            // [metadata, segments]
buildScoreMash2(wavName2, analysis)                 // writes raw.scd
getOSC()                                            // sclang converts raw.scd -> tmp.osc
def mashName = (wavName2.tokenize(".")[0] + "_MASH.wav") as String
runNRT(mashName)                                    // offline render of the mash
wavtoMp3(mashName)                                  // final deliverable
println "done"

 

 

Comments (0)

You don't have permission to comment on this page.