sqoop-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From build...@apache.org
Subject svn commit: r930498 [10/12] - in /websites/staging/sqoop/trunk/content: ./ docs/1.99.4/ docs/1.99.4/_sources/ docs/1.99.4/_static/ docs/1.99.4/css/ docs/1.99.4/src/ docs/1.99.4/src/main/ docs/1.99.4/src/main/webapp/ docs/1.99.4/src/main/webapp/WEB-INF/...
Date Tue, 25 Nov 2014 21:57:11 GMT
Added: websites/staging/sqoop/trunk/content/docs/1.99.4/searchindex.js
==============================================================================
--- websites/staging/sqoop/trunk/content/docs/1.99.4/searchindex.js (added)
+++ websites/staging/sqoop/trunk/content/docs/1.99.4/searchindex.js Tue Nov 25 21:57:10 2014
@@ -0,0 +1 @@
+Search.setIndex({terms:{expect:[0,2,5],strarg:2,place:5,bit:7,actual:2,usual:[8,9],also:[2,4,5,9,10,7],getinput:4,overrid:2,hdfs_write_op:10,extend:2,file_bytes_written:10,togeth:2,wtite:2,marker:2,capabl:[2,5,7],unsign:10,handler:10,hdfs_large_read_op:10,client:[0,1,4,3,5,6,7],classavail:2,uniqu:[2,4,3,10],repositoryload:9,seriou:4,show:[1,8,3,5,9,10],deploy:9,bin:0,tear:9,ecosystem:10,testlink:10,arbitrari:0,savejob:4,string:[2,4,3,10],local:[1,2,8,10],permiss:6,compress:[3,10],gzip:[3,10],further:[0,9,10],differ:[0,2,8,3,7],peris:3,full:[0,5,10],localhost:[4,3,5,10],empti:[2,5],demo:[3,6],structur:[9,10,6],tool:[0,5,9,6,7],record:[2,4,10],"3amysql":10,describ:[1,2,4,3,5,7],continu:[2,10],filesystemcount:10,hadoop:[0,8,4,9,10],loadercontext:2,mai:[2,6,7],across:2,interact:[2,4,3,5,10],automat:[8,5,9,7],entri:[0,3],until:[2,4],appropri:[0,8],warranti:6,getschema:2,overview:6,savelink:4,content:[0,2,5,9,10],succe:9,thu:[0,2,4,5,10],incompat:[0,3],lifecyl:2,replac:7,trigger:10,"enum"
 :[2,10],comment:[0,5],effici:6,manag:[2,3,5,10],below:[1,2,5,10,6],mode:[3,5],instead:[3,9],webapp:[3,5,10],prepar:2,"static":[2,4],click:1,connectionconfigur:2,would:4,job_1412137947693_0004:10,repositoryloadtool:9,cid:[4,3,5,10],administr:[0,9,6],sql:[3,10],"void":[2,4],gettojobconfig:4,instanc:[2,10],might:[0,8,3,5,9,10,7],remot:10,prefix:2,mon:[3,10],getto:2,gc_time_milli:10,output:[2,3,10],jump:10,deletejob:4,henc:[2,4,10],yet:2,optim:2,taskcount:10,atleast:2,getfromjobconfig:4,altern:1,here:[2,4,10],jname:10,write:[2,4,3,9,10,6],reli:3,counter:[4,10],come:10,veri:0,basi:6,who:10,workflow:4,himself:9,extractor:[2,3,10],onli:[0,2,8,4,5,10,7],rest:[0,3,5,10,6],getpartit:2,ident:9,transmiss:5,ownership:6,number:[0,4,10],"try":6,field:[2,10],backup:7,consumerthread:2,mean:[2,4],simpli:7,stack:10,remain:[0,3,10],schema:[2,3,10,6,7],correctli:[0,8,9,7],specifi:[0,2,8,4,5,10],broadli:2,sqoopclient:4,command:[0,1,3,4,5,6,7,8,9],"true":[1,2,4,3,5,10,7],alwai:[9,10],choos:[2,3,9],resolv:
 4,lz4:[3,10],some:10,desir:[0,9],zsh:5,text_fil:[3,10],tabl:[2,3,5,10],unabl:10,derbyrepo_0030:10,identifi:[4,3,10],softwar:[8,10,6],getjob:[4,10],createjob:4,main:[2,4,9],poll:[4,5],denot:5,too:3,intermediatedataformat:2,interpret:5,unit:8,assembl:10,namenod:0,hand:6,cycl:[0,2],sqoopmapp:2,done:2,previou:[2,5],get:[4,5,10,6],variabl:[1,9],polltim:4,ioexcept:2,partitioncolumn:4,necessari:[10,7],schemanam:[4,10],part:[0,2,4,3,10,7],submit:[4,5,10],"2fsqoop":10,save:[4,5],your:[0,8,3,5,10,6],repositorydump:9,rang:2,intern:[2,5,6],columnar:2,unless:6,dure:[10,7],setcreationus:4,line:[0,8,3,5,10,6],compat:[0,5],getvalidationmessag:4,start:[0,4,3,5,6,7,9,10],sqoopsplit:2,writitng:2,avoid:7,getfromconfig:4,sqoopconnector:2,startjob:4,improv:5,deletelink:4,given:[1,2,4,5,9,10],avail:[0,2,8,3,5,9,10],code:[1,2,8,4,10,6],classpath:[1,9],none:[3,10],warn:4,privat:[2,4],dhadoop:8,initializercontext:2,mutipl:2,branch:8,larger:8,sqooppartit:2,group:[2,4,10],found:10,"2fmysql":10,strongli:7,sensi
 t:[2,9,10],thi:[0,1,2,3,4,5,6,7,8,9,10],derbi:[10,7],focu:2,build:[1,8,3,5,10,6],fqdn:5,customcompress:10,ctrl:5,date:[3,10],within:[2,10],"3a8020":10,setup:[1,6],jobconfigur:2,aspect:10,resultset:2,argument:[2,4,5],insid:[0,9,10],getstr:4,getjobstatu:4,rule:10,numload:10,tutori:4,alreadi:[0,3,5,9],futur:2,doe:2,through:[2,3],second:[0,8,3,10],first:[2,8,4,3,10,7],loader:[0,2,3,10],sta:[3,5],integ:10,contribut:[2,6],mtoconfig:4,abort:10,countergroup:4,ddownloadjavadoc:1,"abstract":2,partit:[2,3],went:4,now:10,represent:2,add:[0,2],writer:2,configvalid:2,access:7,machin:[0,9],set:[0,2,3,4,5,6,7,8,9,10],requir:[1,2,3,4,5,6,7,8,9,10],version:[0,1,2,3,4,5,6,7,8,9,10],belong:10,genericjdbcpartit:2,groupid:4,sqoop2:[0,1,8,3,5,9,10,7],conf:0,souurc:2,titl:3,succeed:10,configurationclass:2,ent:[3,10],mfromconfig:4,each:[0,1,2,3,5,9,10],entiti:[2,4,3,10,7],"import":1,document:[0,1,2,4,5,10,6,7],"3a8090":10,instruct:[1,7],instanti:2,elabor:10,app:10,profil:8,left:10,trunk:10,print:[4,5,9],sec
 tion:[0,2,5,10],initi:[2,4,10],specif:[0,10,6],transfer:[2,4,3,6],method:[2,4,10],step:[0,1,2,8,7],proxi:[3,10],develop:[1,2,8,6],establish:2,creat:[0,1,2,8,4,3,5,9,10],put:10,hold:[2,4,5],lib:[0,1,10],label:[2,10],resourc:[2,4,5,10],info:4,lid:[4,5,10],packag:[0,1,2,8],librari:0,dialog:1,sqoop2rc:5,relev:[4,10],law:6,merged_map_output:10,past:10,point:0,getfrom:2,getcount:4,effect:9,assum:0,typic:10,emptyconfigur:2,node:0,don:[0,2],length:10,occur:[2,10],greater:4,throttlingconfig:[4,10],finish:[0,1,2,4,9,10,7],respons:[2,10],must:[0,2,8,3,10],bootstrap:7,com:[4,3,10],batch:5,certain:10,root:[4,3,10],interfac:[2,3,5],where:[0,1,2,5,10],application_1412137947693_0004:10,never:10,option:[1,2,3,5,9],"class":[0,2,4,3,9,10,7],paramet:[1,4,10],wrong:4,user:[0,2,4,5,9,10,6],consult:0,activ:2,getsplit:2,indent:3,well:[0,2,5,6],checkout:8,els:4,why:9,exact:3,term:2,util:[2,7],more:[0,2,4,5,10,6],txt:10,path:[0,1,5,10,7],linkid:4,procedur:[3,10,7],driver:[0,2,8,4,3,5,10,7],sqooprecordwrit:2,
 dataoutput:2,json:[9,10],upgrad:[2,9,10,6,7],annot:2,pleas:[0,8,3,5,9,10,7],creation:[1,3,5,10],getconfig:4,"long":[4,9],tandem:2,cassandra:6,includ:[2,9,10],balanc:2,pend:2,end:[3,5],ship:0,progress:[4,3,10],govern:6,featur:[2,7],newurl:4,subset:2,move:[0,7],about:[3,5],few:10,file:[0,1,2,5,9,10,6,7],check:[0,8,4,3,5,9,6],those:[2,10],offici:8,than:[2,4,7],testinput:10,linkconfig:[2,4,10],call:[0,2,8,3,5],genericjdbcfromdestroy:2,mani:[2,4],note:[2,8,3,5,9,10,7],want:[0,10],valu:[0,2,8,4,3,5,10,7],rows_read:10,abil:5,getjobconfigurationclass:2,proce:4,support:[0,2,8,4,3,5,10,6],recal:[3,5],wip:8,hbase:6,sqoopcount:10,sqoopreduc:2,driven:2,phase:2,nor:2,header:10,what:2,doit:10,sure:[0,10],encourag:2,respect:[8,4,5,10],java_hom:1,verifi:[0,3,9],themselv:2,storagetyp:10,written:[1,2],pass:[2,4],"switch":2,order:[2,3,9,10,7],fit:0,connector:[2,4,3,5,10,6,7],contain:[0,3,5,9,10,7],resource_bundle_nam:2,etl:2,seen:3,addit:[2,4,5,10,6,7],servic:[0,9],issu:[4,6],admin:6,explain:[2,4,10],a
 rtefact:0,manual:[1,8,9,7],autoupgrad:7,jdbcrepositori:10,tojobconfigur:[2,10],timestamp:10,over:[2,10],url:[4,3,5,10],repo:8,storag:10,jobtrack:0,provid:[0,2,4,3,5,10],prefer:7,"2fjob1":10,partitionercontext:2,genericjdbcconnectorconst:2,brows:1,connectionstr:[2,4,10],vari:7,mb_millis_map:10,genericjdbcload:2,"public":[2,4],sinc:[2,10],requisit:4,apach:[0,2,3,4,5,6,7,8,9,10],repres:[2,4,5,10],mdriverconfig:4,bzip2:[3,10],distribut:[0,2,8,9,6],least:8,www:6,cpu_millisecond:10,getdatawrit:2,edit:[3,5],same:[0,2,4,3,9,10,7],just:0,readfield:2,derbyrepositoryhandl:10,column:[2,3,10],environment:9,stopjob:4,"boolean":10,interrupt:5,pure:5,genericjdbctoiniti:2,stp:[3,5],invok:[2,4],org:[0,2,3,4,5,6,7,8,9,10],vcores_millis_map:10,exercis:10,complianc:6,text:10,catalina:0,writabl:2,how:[0,1,2,8,4,10,7],gettoconfig:4,job:[2,4,3,5,9,10,7],determin:2,job_1412137947693_0001:3,convent:2,synchron:[4,5],partition:[2,10],select:[1,4],context:[2,10],inconsist:9,dummi:4,common:[0,1,3,5,10],test:[8,1
 0],tmp:[4,10],lzo:[3,10],extract:2,failure_on_submit:10,application_1412137947693_0001:3,messag:[0,4,3,5,9,10],licens:[0,3,6],copyright:6,getsupporteddirect:2,statu:[2,4,3,5,10],compani:5,down:9,properti:[0,2,3,5,7],custom:[2,3,10],languag:[10,6],histori:[3,5],shell:[0,3,5,6],understand:[2,10],integr:8,facil:[2,4],tweak:0,store:[0,2,9,10,6],need:[0,2,3,4,5,7,8,9,10],abstractvalid:2,ani:[0,2,4,3,5,6,7,9,10],care:[2,9],valdat:10,bundl:[0,2,3],result:[5,10],port:[0,3,5,10],two:[0,2,8,3,5,7],jdbcdriver:[2,4,10],stagetablenam:10,interest:[4,6],releas:[10,7],similar:[0,2,8,3],back:7,special:8,express:[10,6],applic:[4,5,10,6],linux:5,origin:7,spilled_record:10,tostr:2,evolv:[2,3],pbinari:8,modul:1,propertiesconfigurationprovid:0,between:[2,4],getvalu:4,eclips:1,face:0,impli:6,expos:[2,10],decompress:0,out:[0,2,8,4,5,9,10,6],unexpect:3,resourcemanag:0,getdataread:2,higher:7,fromjob:[2,3],printmessag:4,walk:3,api:[2,4,3,5,10,6],janm:10,getconnector:4,process:[2,9,10,7],post:10,password:[2,4,
 3,9,10],mlink:4,similarli:[0,5,10],setvalu:4,extractorcontext:2,split:2,getconfigurableupgrad:2,program:10,addmessag:2,them:[0,2,4,3,9,10],"short":[3,5],task:[2,9],when:[2,4,5,9,10,7],serv:0,directori:[0,1,3,5,10],virtual_memory_byt:10,"export":[1,10],notic:6,window:1,jvm:1,accept:5,compil:[8,3],writearrayrecord:2,gener:[1,2,4,3,9,10,7],databas:[2,3,9,10,6,7],fatal:4,practic:10,genericjdbcpartition:2,mysql:[4,3,10,7],shutdown:7,listen:10,nameservice1:3,mjob:4,send:[2,4,5,10],work:[0,2,8,3,5,6],control:[2,8,4],getjar:2,accomplish:2,setcont:2,notempti:2,stage:2,sampl:2,workspac:1,host:[0,8,3,5,10],instac:10,timeout:5,"2fnamenod":10,type:[2,4,10],correct:4,getrecordwrit:2,physical_memory_byt:10,suppli:10,vampir:4,getstatu:4,subsystem:9,consist:10,valdiat:[2,4],mlinkconfig:4,tojobconfig:[2,4,10],box:1,getnam:4,quick:6,dowithconnect:10,look:[2,10,7],obtain:6,keep:10,arrai:[2,10],implement:[2,4,3,9,7],asf:[8,6],our:[2,3,5],"while":[2,9,10],act:0,file_large_read_op:10,"2fuser":10,view:4,su
 ffici:[0,7],guidelin:2,boot:[4,3,10],you:[0,1,2,3,4,5,6,7,8,9,10],stop:[0,4,3,5,9,10,7],regard:6,particular:[2,4],protocol:[3,10],download:[1,8,6],readarrayrecord:2,spark:2,pre:4,system:[1,2,8,4,3,10,6],lifecycl:2,mainten:9,time:[2,4,3,10],couldn:10,dataread:2,"new":[1,2,4,3,5,9,10,7],advanc:8,directroi:1,tolinkid:4,temporari:2,list:[0,2,4,3,5,9],isempti:2,startswith:2,"418c5f637c3f09b94ea7fc3b0a4610831373a25f":[3,10],dirti:6,corrupt:[9,7],executuon:4,snapshot:[3,10],setenv:0,datainput:2,eot:5,allow:[4,3,5,10],recommend:[0,2,9],deflat:[3,10],sqoop_http_port:0,tcp:[3,5],jobcount:10,queri:[3,10],exit:[3,5],semi:6,portion:2,getprogress:4,sqoop_admin_port:0,usr:[0,1,4],report:4,network:10,standard:2,inform:[3,5,9,10,6,7],wrapper:4,file_write_op:10,sqoopexcept:10,depend:[0,1,2,8,4,7],getlinkconfig:4,exampl:[0,2,3,4,5,6,7,8,9,10],less:2,artifact:0,without:[2,3,5,9,6],disabl:[3,5,10,7],bigtop:9,hdfsconnector:[3,10],unknown:10,genericjdbcfrominiti:2,serial:10,numextractor:[4,10],abov:[1,4,3
 ],physic:2,registr:2,receiv:[2,3],msubmiss:4,createlink:4,correspondig:10,action:[5,7],environ:[0,1,2,4,9,6],after:[0,2,8,4,10],failur:[0,9,7],built:[2,7],folder:0,auxiliari:5,"function":[4,5],nov:[3,10],jobrequesthandl:10,hdfs_bytes_written:10,artifactid:4,nullabl:10,moment:[2,8],except:[2,8,4,9,10,6],like:[2,8,4,5,9],verif:[0,9],charact:5,map_output_record:10,conveni:0,littl:[0,2],inputdirectori:10,getintermediatedataformat:2,most:[0,2,3,5],ask:[2,5],let:3,getmessag:4,buffi:4,reconfigur:5,failed_shuffl:10,anoth:[1,8,3,7],thing:[4,10],mind:10,howev:[0,3,9,7],shouldclearstaget:10,placehold:4,chang:[0,2,8,9,10,7],next:[1,2,3,7],fail:[9,10],experi:5,advis:7,perform:[9,7],getbundl:2,aid:2,dir:1,fill:[2,4,3,10],basic:3,getconnectorlinkconfig:4,zero:4,other:[0,2,8,5,9],enter:[2,5],connectorconfigurableupgrad:2,home:[1,5],connect:[0,2,3,5,9,10,6],map_input_record:10,pwd:0,getdriverconfig:4,bash:5,per:[2,4,10],isrun:4,non:[2,4],abl:[0,2,5],find:[0,9,10],vbasavaraj:[3,10],which:[0,2,4,10],p
 lan:0,easier:[0,10],cloudera:[3,10],compound:0,maven:[1,8,4],under:[9,10,6],jid:[4,3,5,10],sqoop:[0,1,2,3,4,5,6,7,8,9,10],generic_hdfs_connector_0000:10,model:2,made:10,drop:2,daemon:0,tarbal:[0,8],displai:[4,3,5],via:[2,10],wiki:6,diagram:2,jira:6,assign:[4,3,10],bodi:10,should:[0,2,4,3,5,9,10,7],hadoopvers:8,page:[0,3,7],inputlist:4,side:[0,5,10],been:[10,7],unind:3,instal:[0,1,8,3,6],interv:4,proper:0,jobid:4,whether:[0,2,4,10],split_raw_byt:10,bytes_written:10,cach:8,"throw":2,appli:10,"final":[2,4,3,10],log:[5,9],"case":[0,2,4,9,7],relat:[0,2,5,10,6],datawrit:2,explicitli:[8,7],encapsul:2,mconfig:4,fromlinkid:4,defin:[2,6],success:[0,1,4,9,7],binari:[0,8,9,7],overload:10,hit:4,"2fvbsqoop":10,therefor:0,updat:[4,3,5,9,10,7],retriev:[4,5,10],updatelink:4,tablenam:[4,10],last:[2,5,10],getlink:4,guid:[8,4,3,10,6,7],tomcat:[0,5,10],immedi:5,dskiptest:1,agreement:6,lname:10,metadata:[9,10],"null":[2,4,3,10],again:7,resourcebundl:[2,4],getcont:2,updatejob:4,kind:6,engin:[2,4,10],jdbcp
 roperti:2,much:8,logic:[2,4,10],current:[2,8,3,5,9],fileinputformatcount:10,make:[0,10],question:2,extra:[0,5],detail:[2,3,5,9,10,6],declips:1,never_execut:10,all:[0,1,2,3,4,5,7,8,9,10],millisecond:5,input:[1,2,4,10,7],faster:8,findjob:10,definit:2,persist:[4,3,5,10,7],underlin:3,upon:[0,9],getlabelkei:4,web:5,total_launched_map:10,foundat:[8,6],parallel:2,getvers:2,sqoopmapdatawrit:2,trace:10,either:[0,2,10,6,7],life:[0,2],extern:[4,3,10],file_read_op:10,exhaust:2,getlinkconfigurationclass:2,callback:4,cluster:[0,10],requestedvers:4,configclass:2,far:10,hdfs_bytes_read:10,project:[1,8,4,3,10,6],jar:[0,2,4],reduc:2,along:[0,10],statement:3,stu:[3,5],direct:[2,4,3],repositori:[1,3,4,5,7,8,9,10],exactli:9,other_local_map:10,suppos:9,visit:[3,5],follow:[0,1,2,3,4,5,6,7,8,9,10],exist:[0,1,3,5,9,10],affect:0,hdf:[0,4,3,10,6],jdk1:1,file_bytes_read:10,begin:5,load:[0,2,5,9,10],close:2,outputformat:[2,10],thrown:4,complet:7,slots_millis_map:10,commun:[3,5],xvf:0,account:10,familiar:10,gete
 xternallink:4,refer:2,minput:4,sqoopnulloutputformat:2,target:[0,8,10],immut:7,unfil:4,destroy:2,cover:[10,7],"return":[2,4,10],net:5,iter:[2,4,3],pst:[3,10],hdfs_read_op:10,locat:[0,1],sqoopoutputformatloadexecutor:2,ignor:[5,10],data:[2,4,3,9,10,6,7],run:[0,2,3,4,5,7,8,9,10],snappi:[3,10],println:4,fulli:2,consid:5,have:[0,2,4,3,5,9,10,7],take:[9,10,7],lastli:[0,3],wai:[2,7],sqooprefactor:10,disrupt:9,simpl:3,fixed_point:10,fromjobconfig:[2,4,10],usag:[3,6],fromjobconfigur:[2,10],dump:9,separ:[0,3,7],config:[0,2,4,3,5,10,7],usernam:[2,4,3,10],upgradetool:[9,7],synchoron:5,copi:[0,6],git:[1,8,10],aslist:2,committed_heap_byt:10,tojob:[2,3,10],name:[1,2,4,3,5,9,10],onc:[4,7],multipl:[0,1,2],delet:[4,3,5,10],collect:2,enabl:[2,3,5,10,7],etc:1,m2_repo:1,singl:0,limit:6,writng:2,base:[3,5],script:5,associ:[2,4,3,5,10],sqoop_bootstrap:0,sqoopinputformat:2,verifytool:[0,9],outputdirectori:[4,10],successfulli:[3,10,7],format:[2,4,3,5,9,10],error:[2,4,10],getconnectorconfigbundl:4,link:[1,2
 ,4,3,5,10,6,7],asynchron:4,genericjdbcconnector:[2,3,10],bytes_read:10,excit:6,oper:[2,4,5,10],conenector:2,block:[4,3],track:[10,6],"2froot":10,connectorid:4,"0_32":1,"default":[0,2,8,3,5,9,10,7],agre:6,blank:[3,10],enough:4,mapreduc:[0,2,4,10],skip:0,possibl:[2,8,10],canproce:4,against:8,minut:[3,6],uset:10,unzip:0,befor:[0,2,4,5,9,10,7],clean:[1,2],see:[0,3,5,9,6,7],setnam:4,setserverurl:4,primarili:2,millis_map:10,bad:10,servlet:9,verbos:5,generic_hdfs_connector_00:10,contributor:6,could:10,vbsqoop:[3,10],modfi:4,becom:2,qualifi:2,specifii:[2,4,3],regist:[2,4,3,5,10],getvalidationstatu:4,repositorydumptool:9,descript:[0,2,4,5,10],lead:9,configur:[0,1,2,3,5,9,10,7],request:[4,10],throttl:10,entir:8,read:[2,4,3,9,10],getstringinput:4,match:[0,2],server:[0,4,3,5,6,7,9,10],help:[2,3,5,10],execut:[0,1,2,8,4,5,9,10,7],dataset:2,variou:[2,8,3,5,9,7],quot:3,valid:[2,4,3,5,10],size:[2,10],genericjdbctodestroy:2,uri:[3,10],sequence_fil:[3,10],even:9,unintent:7,popul:4,correspond:[2,4,10],
 map:[2,3,10],unstructur:6,betweeen:6,sourc:[1,2,8,4,3,10,6],from:[0,2,3,4,5,7,8,9,10],fileoutputformatcount:10,getexternalid:4,driverconfig:4,mapper:4,submiss:[2,4,3,5,9,10],genericjdbcextractor:2,rememb:10,getexceptioninfo:4,older:8,jdbc:[0,2,8,4,3,10],frequent:4,smaller:8,low:4,java:[0,1,4,3,10,6],can:[0,1,2,3,4,5,6,7,8,9,10],peform:10,http:[8,4,3,5,10,6],mislead:2,auto:7,shown:[2,3],object:[2,4,3,5,9,10],everi:[4,10],getpersistenceid:4,compon:[9,7],design:6,revis:[0,8,3,10],sever:5,clone:[8,3,5],someth:[4,10],due:3,keen:6,becaus:0,ddownloadsourc:1,invalid:[5,10],three:7,both:[2,4,3,5,10,7],accur:8,decrib:2,configtyp:2,discuss:7,explan:10,tar:0,condit:6,middl:10,happen:[2,4,10],major:[0,8,3],mvn:[1,8],cname:10,thei:[2,3,5,9,10,6],decid:0,fals:[5,10,7],boundari:[3,10],intermedi:2,commit:2,linkconfigur:[2,10],sqoopi:3},filenames:["Installation","DevEnv","ConnectorDevelopment","Sqoop5MinutesDemo","ClientAPI","CommandLineClient","index","Upgrade","BuildingSqoop2","Tools","RESTAPI"],ti
 tles:["Installation","Sqoop 2 Development Environment Setup","Sqoop 2 Connector Development","Sqoop 5 Minutes Demo","Sqoop Java Client API Guide","Command Line Shell","Apache Sqoop documentation","Upgrade","Building Sqoop2 from source code","Tools","Sqoop REST API Guide"],objnames:{},objects:{},objtypes:{}})
\ No newline at end of file

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/sqoop-docs.iml
==============================================================================
Binary file - no diff available.

Propchange: websites/staging/sqoop/trunk/content/docs/1.99.4/sqoop-docs.iml
------------------------------------------------------------------------------
    svn:mime-type = application/xml

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/src/main/webapp/WEB-INF/web.xml
==============================================================================
Binary file - no diff available.

Propchange: websites/staging/sqoop/trunk/content/docs/1.99.4/src/main/webapp/WEB-INF/web.xml
------------------------------------------------------------------------------
    svn:mime-type = application/xml

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/src/main/webapp/index.html
==============================================================================
--- websites/staging/sqoop/trunk/content/docs/1.99.4/src/main/webapp/index.html (added)
+++ websites/staging/sqoop/trunk/content/docs/1.99.4/src/main/webapp/index.html Tue Nov 25 21:57:10 2014
@@ -0,0 +1,23 @@
+<!DOCTYPE html>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<html>
+<head><title>Apache Sqoop</title></head>
+<body>
+Apache Sqoop
+</body>
+</html>

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/BuildingSqoop2.rst
==============================================================================
--- websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/BuildingSqoop2.rst (added)
+++ websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/BuildingSqoop2.rst Tue Nov 25 21:57:10 2014
@@ -0,0 +1,69 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+================================
+Building Sqoop2 from source code
+================================
+
+This guide will show you how to build Sqoop2 from source code. Sqoop is using `maven <http://maven.apache.org/>`_ as build system. You will need to use at least version 3.0 as older versions will not work correctly. All other dependencies will be downloaded by maven automatically, with the exception of special JDBC drivers that are needed only for advanced integration tests.
+
+Downloading source code
+-----------------------
+
+Sqoop project is using git as a revision control system hosted at Apache Software Foundation. You can clone entire repository using following command:
+
+::
+
+  git clone https://git-wip-us.apache.org/repos/asf/sqoop.git sqoop2
+
+Sqoop2 is currently developed in special branch ``sqoop2`` that you need to check out after clone:
+
+::
+
+  cd sqoop2
+  git checkout sqoop2
+
+Building project
+----------------
+
+You can use usual maven targets like ``compile`` or ``package`` to build the project. Sqoop supports two major Hadoop revisions at the moment - 1.x and 2.x. As compiled code for one Hadoop major version can't be used on another, you must compile Sqoop against appropriate Hadoop version. You can change the target Hadoop version by specifying ``-Dhadoop.profile=$hadoopVersion`` on the maven command line. Possible values of ``$hadoopVersion`` are 100 and 200 for Hadoop version 1.x and 2.x respectively. Sqoop will compile against Hadoop 2 by default. Following example will compile Sqoop against Hadoop 1.x:
+
+::
+
+  mvn compile -Dhadoop.profile=100
+
+Maven target ``package`` can be used to create Sqoop packages similar to the ones that are officially available for download. Sqoop will build only source tarball by default. You need to specify ``-Pbinary`` to build binary distribution. You might need to explicitly specify Hadoop version if the default is not accurate.
+
+::
+
+  mvn package -Pbinary
+
+Running tests
+-------------
+
+Sqoop supports two different sets of tests. First smaller and much faster set is called unit tests and will be executed on maven target ``test``. Second larger set of integration tests will be executed on maven target ``integration-test``. Please note that integration tests might require manual steps for installing various JDBC drivers into your local maven cache.
+
+Example for running unit tests:
+
+::
+
+  mvn test
+
+Example for running integration tests:
+
+::
+
+  mvn integration-test

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/ClientAPI.rst
==============================================================================
--- websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/ClientAPI.rst (added)
+++ websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/ClientAPI.rst Tue Nov 25 21:57:10 2014
@@ -0,0 +1,304 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+===========================
+Sqoop Java Client API Guide
+===========================
+
+This document will explain how to use Sqoop Java Client API with external application. Client API allows you to execute the functions of sqoop commands. It requires Sqoop Client JAR and its dependencies.
+
+The main class that provides wrapper methods for all the supported operations is the
+::
+
+  public class SqoopClient {
+    ...
+  }
+
+Java Client API is explained using Generic JDBC Connector example. Before executing the application using the sqoop client API, check whether sqoop server is running.
+
+Workflow
+========
+
+The following workflow has to be followed for executing a sqoop job in the Sqoop server.
+
+  1. Create LINK object for a given connectorId             - Creates Link object and returns linkId (lid)
+  2. Create a JOB for a given "from" and "to" linkId            - Create Job object and returns jobId (jid)
+  3. Start the JOB for a given jobId                        - Start Job on the server and creates a submission record
+
+Project Dependencies
+====================
+Here is the required maven dependency:
+
+::
+
+  <dependency>
+    <groupId>org.apache.sqoop</groupId>
+      <artifactId>sqoop-client</artifactId>
+      <version>${requestedVersion}</version>
+  </dependency>
+
+Initialization
+==============
+
+First initialize the SqoopClient class with server URL as argument.
+
+::
+
+  String url = "http://localhost:12000/sqoop/";
+  SqoopClient client = new SqoopClient(url);
+
+The server URL value can be modified by passing the new value to the setServerUrl(String) method:
+
+::
+
+  client.setServerUrl(newUrl);
+
+
+Link
+====
+Connectors provide the facility to interact with many data sources and thus can be used as a means to transfer data between them in Sqoop. The registered connector implementation will provide logic to read from and/or write to a data source that it represents. A connector can have one or more links associated with it. The java client API allows you to create, update and delete a link for any registered connector. Creating or updating a link requires you to populate the Link Config for that particular connector. Hence the first thing to do is get the list of registered connectors and select the connector for which you would like to create a link. Then
+you can get the list of all the config/inputs using `Display Config and Input Names For Connector`_ for that connector.
+
+
+Save Link
+---------
+
+First create a new link by invoking ``createLink(cid)`` method with connector Id and it returns a MLink object with dummy id and the unfilled link config inputs for that connector. Then fill the config inputs with relevant values. Invoke ``saveLink`` passing it the filled MLink object.
+
+::
+
+  // create a placeholder for link
+  long connectorId = 1;
+  MLink link = client.createLink(connectorId);
+  link.setName("Vampire");
+  link.setCreationUser("Buffy");
+  MLinkConfig linkConfig = link.getConnectorLinkConfig();
+  // fill in the link config values
+  linkConfig.getStringInput("linkConfig.connectionString").setValue("jdbc:mysql://localhost/my");
+  linkConfig.getStringInput("linkConfig.jdbcDriver").setValue("com.mysql.jdbc.Driver");
+  linkConfig.getStringInput("linkConfig.username").setValue("root");
+  linkConfig.getStringInput("linkConfig.password").setValue("root");
+  // save the link object that was filled
+  Status status = client.saveLink(link);
+  if(status.canProceed()) {
+   System.out.println("Created Link with Link Id : " + link.getPersistenceId());
+  } else {
+   System.out.println("Something went wrong creating the link");
+  }
+
+``status.canProceed()`` returns true if status is OK or a WARNING. Before sending the status, the link config values are validated using the corresponding validator associated with the link config inputs.
+
+On successful execution of the saveLink method, new link Id is assigned to the link object else an exception is thrown. ``link.getPersistenceId()`` method returns the unique Id for this object persisted in the sqoop repository.
+
+User can retrieve a link using the following methods
+
++----------------------------+--------------------------------------+
+|   Method                   | Description                          |
++============================+======================================+
+| ``getLink(lid)``           | Returns a link by id                 |
++----------------------------+--------------------------------------+
+| ``getLinks()``             | Returns list of links in the sqoop   |
++----------------------------+--------------------------------------+
+
+Job
+===
+
+A sqoop job holds the ``From`` and ``To`` parts for transferring data from the ``From`` data source to the ``To`` data source. Both the ``From`` and the ``To`` are uniquely identified by their corresponding connector Link Ids. i.e. when creating a job we have to specify the ``FromLinkId`` and the ``ToLinkId``. Thus the pre-requisite for creating a job is to first create the links as described above.
+
+Once the linkIds for the ``From`` and ``To`` are given, then the job configs for the associated connector for the link object have to be filled. You can get the list of all the from and to job config/inputs using `Display Config and Input Names For Connector`_ for that connector. A connector can have one or more links. We then use the links in the ``From`` and ``To`` direction to populate the corresponding ``MFromConfig`` and ``MToConfig`` respectively.
+
+In addition to filling the job configs for the ``From`` and the ``To`` representing the link, we also need to fill the driver configs that control the job execution engine environment. For example, if the job execution engine happens to be the MapReduce we will specify the number of mappers to be used in reading data from the ``From`` data source.
+
+Save Job
+---------
+Here is the code to create and then save a job
+::
+
+  String url = "http://localhost:12000/sqoop/";
+  SqoopClient client = new SqoopClient(url);
+  //Creating dummy job object
+  long fromLinkId = 1;// for jdbc connector
+  long toLinkId = 2; // for HDFS connector
+  MJob job = client.createJob(fromLinkId, toLinkId);
+  job.setName("Vampire");
+  job.setCreationUser("Buffy");
+  // set the "FROM" link job config values
+  MFromConfig fromJobConfig = job.getFromJobConfig();
+  fromJobConfig.getStringInput("fromJobConfig.schemaName").setValue("sqoop");
+  fromJobConfig.getStringInput("fromJobConfig.tableName").setValue("sqoop");
+  fromJobConfig.getStringInput("fromJobConfig.partitionColumn").setValue("id");
+  // set the "TO" link job config values
+  MToConfig toJobConfig = job.getToJobConfig();
+  toJobConfig.getStringInput("toJobConfig.outputDirectory").setValue("/usr/tmp");
+  // set the driver config values
+  MDriverConfig driverConfig = job.getDriverConfig();
+  driverConfig.getStringInput("throttlingConfig.numExtractors").setValue("3");
+
+  Status status = client.saveJob(job);
+  if(status.canProceed()) {
+   System.out.println("Created Job with Job Id: "+ job.getPersistenceId());
+  } else {
+   System.out.println("Something went wrong creating the job");
+  }
+
+User can retrieve a job using the following methods
+
++----------------------------+--------------------------------------+
+|   Method                   | Description                          |
++============================+======================================+
+| ``getJob(jid)``            | Returns a job by id                  |
++----------------------------+--------------------------------------+
+| ``getJobs()``              | Returns list of jobs in the sqoop    |
++----------------------------+--------------------------------------+
+
+
+List of status codes
+--------------------
+
++------------------+------------------------------------------------------------------------------------------------------------+
+| Function         | Description                                                                                                |
++==================+============================================================================================================+
+| ``OK``           | There are no issues, no warnings.                                                                          |
++------------------+------------------------------------------------------------------------------------------------------------+
+| ``WARNING``      | Validated entity is correct enough to proceed. Not a fatal error                                           |
++------------------+------------------------------------------------------------------------------------------------------------+
+| ``ERROR``        | There are serious issues with validated entity. We can't proceed until reported issues will be resolved.   |
++------------------+------------------------------------------------------------------------------------------------------------+
+
+View Error or Warning validation message
+----------------------------------------
+
+In case of any WARNING or ERROR status, the user has to iterate over the list of validation messages.
+
+::
+
+ printMessage(link.getConnectorLinkConfig().getConfigs());
+
+ private static void printMessage(List<MConfig> configs) {
+   for(MConfig config : configs) {
+     List<MInput<?>> inputlist = config.getInputs();
+     if (config.getValidationMessages() != null) {
+       // print every validation message
+       for(Message message : config.getValidationMessages()) {
+         System.out.println("Config validation message: " + message.getMessage());
+       }
+     }
+     for (MInput minput : inputlist) {
+       if (minput.getValidationStatus() == Status.WARNING) {
+         for(Message message : config.getValidationMessages()) {
+           System.out.println("Config Input Validation Warning: " + message.getMessage());
+         }
+       }
+       else if (minput.getValidationStatus() == Status.ERROR) {
+         for(Message message : config.getValidationMessages()) {
+           System.out.println("Config Input Validation Error: " + message.getMessage());
+         }
+       }
+     }
+   }
+ }
+
+Updating link and job
+---------------------
+After creating link or job in the repository, you can update or delete a link or job using the following functions
+
++----------------------------------+------------------------------------------------------------------------------------+
+|   Method                         | Description                                                                        |
++==================================+====================================================================================+
+| ``updateLink(link)``             | Invoke update with link and check status for any errors or warnings                |
++----------------------------------+------------------------------------------------------------------------------------+
+| ``deleteLink(lid)``              | Delete link. Deletes only if specified link is not used by any job                 |
++----------------------------------+------------------------------------------------------------------------------------+
+| ``updateJob(job)``               | Invoke update with job and check status for any errors or warnings                 |
++----------------------------------+------------------------------------------------------------------------------------+
+| ``deleteJob(jid)``               | Delete job                                                                         |
++----------------------------------+------------------------------------------------------------------------------------+
+
+Job Start
+==============
+
+Starting a job requires a job id. On successful start, getStatus() method returns "BOOTING" or "RUNNING".
+
+::
+
+  //Job start
+  long jobId = 1;
+  MSubmission submission = client.startJob(jobId);
+  System.out.println("Job Submission Status : " + submission.getStatus());
+  if(submission.getStatus().isRunning() && submission.getProgress() != -1) {
+    System.out.println("Progress : " + String.format("%.2f %%", submission.getProgress() * 100));
+  }
+  System.out.println("Hadoop job id :" + submission.getExternalId());
+  System.out.println("Job link : " + submission.getExternalLink());
+  Counters counters = submission.getCounters();
+  if(counters != null) {
+    System.out.println("Counters:");
+    for(CounterGroup group : counters) {
+      System.out.print("\t");
+      System.out.println(group.getName());
+      for(Counter counter : group) {
+        System.out.print("\t\t");
+        System.out.print(counter.getName());
+        System.out.print(": ");
+        System.out.println(counter.getValue());
+      }
+    }
+  }
+  if(submission.getExceptionInfo() != null) {
+    System.out.println("Exception info : " +submission.getExceptionInfo());
+  }
+
+
+  //Check job status for a running job 
+  MSubmission submission = client.getJobStatus(jobId);
+  if(submission.getStatus().isRunning() && submission.getProgress() != -1) {
+    System.out.println("Progress : " + String.format("%.2f %%", submission.getProgress() * 100));
+  }
+
+  //Stop a running job
+  client.stopJob(jobId);
+
+In the above code block, job start is asynchronous. For synchronous job start, use the ``startJob(jid, callback, pollTime)`` method. If you are not interested in getting the job status, then invoke the same method with "null" as the value for the callback parameter and this returns the final job status. ``pollTime`` is the request interval for getting the job status from the sqoop server and the value should be greater than zero. We will frequently hit the sqoop server if a low value is given for the ``pollTime``. When a synchronous job is started with a non-null callback, it first invokes the callback's ``submitted(MSubmission)`` method on successful start; after every poll time interval, it then invokes the ``updated(MSubmission)`` method on the callback API and finally on finishing the job execution it invokes the ``finished(MSubmission)`` method on the callback API.
+
+Display Config and Input Names For Connector
+============================================
+
+You can view the config/input names for the link and job config types per connector
+
+::
+
+  String url = "http://localhost:12000/sqoop/";
+  SqoopClient client = new SqoopClient(url);
+  long connectorId = 1;
+  // link config for connector
+  describe(client.getConnector(connectorId).getLinkConfig().getConfigs(), client.getConnectorConfigBundle(connectorId));
+  // from job config for connector
+  describe(client.getConnector(connectorId).getFromConfig().getConfigs(), client.getConnectorConfigBundle(connectorId));
+  // to job config for the connector
+  describe(client.getConnector(connectorId).getToConfig().getConfigs(), client.getConnectorConfigBundle(connectorId));
+
+  void describe(List<MConfig> configs, ResourceBundle resource) {
+    for (MConfig config : configs) {
+      System.out.println(resource.getString(config.getLabelKey())+":");
+      List<MInput<?>> inputs = config.getInputs();
+      for (MInput input : inputs) {
+        System.out.println(resource.getString(input.getLabelKey()) + " : " + input.getValue());
+      }
+      System.out.println();
+    }
+  }
+
+
+The above Sqoop 2 Client API tutorial explained how to create a link, create a job, and then start the job.

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/CommandLineClient.rst
==============================================================================
--- websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/CommandLineClient.rst (added)
+++ websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/CommandLineClient.rst Tue Nov 25 21:57:10 2014
@@ -0,0 +1,533 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+===================
+Command Line Shell
+===================
+
+Sqoop 2 provides a command line shell that is capable of communicating with the Sqoop 2 server using the REST interface. The client is able to run in two modes - interactive and batch mode. Commands ``create``, ``update`` and ``clone`` are not currently supported in batch mode. Interactive mode supports all available commands.
+
+You can start Sqoop 2 client in interactive mode using command ``sqoop2-shell``::
+
+  sqoop2-shell
+
+Batch mode can be started by adding additional argument representing path to your Sqoop client script: ::
+
+  sqoop2-shell /path/to/your/script.sqoop
+
+Sqoop client script is expected to contain valid Sqoop client commands, empty lines and lines starting with ``#`` that are denoting comment lines. Comments and empty lines are ignored, all other lines are interpreted. Example script: ::
+
+  # Specify company server
+  set server --host sqoop2.company.net
+
+  # Executing given job
+  start job  --jid 1
+
+
+.. contents:: Table of Contents
+
+Resource file
+=============
+
+The Sqoop 2 client has the ability to load resource files similarly to other command line tools. At the beginning of execution the Sqoop client will check for the existence of the file ``.sqoop2rc`` in the home directory of the currently logged-in user. If such a file exists, it will be interpreted before any additional actions. This file is loaded in both interactive and batch mode. It can be used to execute any batch compatible commands.
+
+Example resource file: ::
+
+  # Configure our Sqoop 2 server automatically
+  set server --host sqoop2.company.net
+
+  # Run in verbose mode by default
+  set option --name verbose --value true
+
+Commands
+========
+
+Sqoop 2 contains several commands that will be documented in this section. Each command has one or more functions that accept various arguments. Not all commands are supported in both interactive and batch mode.
+
+Auxiliary Commands
+------------------
+
+Auxiliary commands are commands that are improving user experience and are running purely on client side. Thus they do not need working connection to the server.
+
+* ``exit`` Exit client immediately. This command can be also executed by sending EOT (end of transmission) character. It's CTRL+D on most common Linux shells like Bash or Zsh.
+* ``history`` Print out command history. Please note that Sqoop client is saving history from previous executions and thus you might see commands that you've executed in previous runs.
+* ``help`` Show all available commands with short in-shell documentation.
+
+::
+
+ sqoop:000> help
+ For information about Sqoop, visit: http://sqoop.apache.org/
+
+ Available commands:
+   exit    (\x  ) Exit the shell
+   history (\H  ) Display, manage and recall edit-line history
+   help    (\h  ) Display this help message
+   set     (\st ) Configure various client options and settings
+   show    (\sh ) Display various objects and configuration options
+   create  (\cr ) Create new object in Sqoop repository
+   delete  (\d  ) Delete existing object in Sqoop repository
+   update  (\up ) Update objects in Sqoop repository
+   clone   (\cl ) Create new object based on existing one
+   start   (\sta) Start job
+   stop    (\stp) Stop job
+   status  (\stu) Display status of a job
+   enable  (\en ) Enable object in Sqoop repository
+   disable (\di ) Disable object in Sqoop repository
+
+Set Command
+-----------
+
+The set command allows setting various properties of the client. Similarly to the auxiliary commands, set does not require a connection to the Sqoop server. The set command is not used to reconfigure the Sqoop server.
+
+Available functions:
+
++---------------+------------------------------------------+
+| Function      | Description                              |
++===============+==========================================+
+| ``server``    | Set connection configuration for server  |
++---------------+------------------------------------------+
+| ``option``    | Set various client side options          |
++---------------+------------------------------------------+
+
+Set Server Function
+~~~~~~~~~~~~~~~~~~~
+
+Configure connection to the Sqoop server - host, port and web application name. Available arguments:
+
++-----------------------+---------------+--------------------------------------------------+
+| Argument              | Default value | Description                                      |
++=======================+===============+==================================================+
+| ``-h``, ``--host``    | localhost     | Server name (FQDN) where Sqoop server is running |
++-----------------------+---------------+--------------------------------------------------+
+| ``-p``, ``--port``    | 12000         | TCP Port                                         |
++-----------------------+---------------+--------------------------------------------------+
+| ``-w``, ``--webapp``  | sqoop         | Tomcat's web application name                    |
++-----------------------+---------------+--------------------------------------------------+
+| ``-u``, ``--url``     |               | Sqoop Server in url format                       |
++-----------------------+---------------+--------------------------------------------------+
+
+Example: ::
+
+  set server --host sqoop2.company.net --port 80 --webapp sqoop
+
+or ::
+
+  set server --url http://sqoop2.company.net:80/sqoop
+
+Note: When ``--url`` option is given, ``--host``, ``--port`` or ``--webapp`` option will be ignored.
+
+Set Option Function
+~~~~~~~~~~~~~~~~~~~
+
+Configure Sqoop client related options. This function has two required arguments ``name`` and ``value``. Name represents the internal property name and value holds the new value that should be set. List of available option names follows:
+
++-------------------+---------------+---------------------------------------------------------------------+
+| Option name       | Default value | Description                                                         |
++===================+===============+=====================================================================+
+| ``verbose``       | false         | Client will print additional information if verbose mode is enabled |
++-------------------+---------------+---------------------------------------------------------------------+
+| ``poll-timeout``  | 10000         | Server poll timeout in milliseconds                                 |
++-------------------+---------------+---------------------------------------------------------------------+
+
+Example: ::
+
+  set option --name verbose --value true
+  set option --name poll-timeout --value 20000
+
+Show Command
+------------
+
+Show commands displays various information as described below.
+
+Available functions:
+
++----------------+--------------------------------------------------------------------------------------------------------+
+| Function       | Description                                                                                            |
++================+========================================================================================================+
+| ``server``     | Display connection information to the sqoop server (host, port, webapp)                                |
++----------------+--------------------------------------------------------------------------------------------------------+
+| ``option``     | Display various client side options                                                                    |
++----------------+--------------------------------------------------------------------------------------------------------+
+| ``version``    | Show client build version, with an option -all it shows server build version and supported api versions|
++----------------+--------------------------------------------------------------------------------------------------------+
+| ``connector``  | Show connector configurable and its related configs                                                    |
++----------------+--------------------------------------------------------------------------------------------------------+
+| ``driver``     | Show driver configurable and its related configs                                                       |
++----------------+--------------------------------------------------------------------------------------------------------+
+| ``link``       | Show links in sqoop                                                                                    |
++----------------+--------------------------------------------------------------------------------------------------------+
+| ``job``        | Show jobs in sqoop                                                                                     |
++----------------+--------------------------------------------------------------------------------------------------------+
+
+Show Server Function
+~~~~~~~~~~~~~~~~~~~~
+
+Show details about connection to Sqoop server.
+
++-----------------------+--------------------------------------------------------------+
+| Argument              |  Description                                                 |
++=======================+==============================================================+
+| ``-a``, ``--all``     | Show all connection related information (host, port, webapp) |
++-----------------------+--------------------------------------------------------------+
+| ``-h``, ``--host``    | Show host                                                    |
++-----------------------+--------------------------------------------------------------+
+| ``-p``, ``--port``    | Show port                                                    |
++-----------------------+--------------------------------------------------------------+
+| ``-w``, ``--webapp``  | Show web application name                                    |
++-----------------------+--------------------------------------------------------------+
+
+Example: ::
+
+  show server --all
+
+Show Option Function
+~~~~~~~~~~~~~~~~~~~~
+
+Show values of various client side options. This function will show all client options when called without arguments.
+
++-----------------------+--------------------------------------------------------------+
+| Argument              |  Description                                                 |
++=======================+==============================================================+
+| ``-n``, ``--name``    | Show client option value with given name                     |
++-----------------------+--------------------------------------------------------------+
+
+Please check table in `Set Option Function`_ section to get a list of all supported option names.
+
+Example: ::
+
+  show option --name verbose
+
+Show Version Function
+~~~~~~~~~~~~~~~~~~~~~
+
+Show build versions of both client and server as well as the supported rest api versions.
+
++------------------------+-----------------------------------------------+
+| Argument               |  Description                                  |
++========================+===============================================+
+| ``-a``, ``--all``      | Show all versions (server, client, api)       |
++------------------------+-----------------------------------------------+
+| ``-c``, ``--client``   | Show client build version                     |
++------------------------+-----------------------------------------------+
+| ``-s``, ``--server``   | Show server build version                     |
++------------------------+-----------------------------------------------+
+| ``-p``, ``--api``      | Show supported api versions                   |
++------------------------+-----------------------------------------------+
+
+Example: ::
+
+  show version --all
+
+Show Connector Function
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Show persisted connector configurable and its related configs used in creating associated link and job objects
+
++-----------------------+------------------------------------------------+
+| Argument              |  Description                                   |
++=======================+================================================+
+| ``-a``, ``--all``     | Show information for all connectors            |
++-----------------------+------------------------------------------------+
+| ``-c``, ``--cid <x>`` | Show information for connector with id ``<x>`` |
++-----------------------+------------------------------------------------+
+
+Example: ::
+
+  show connector --all or show connector
+
+Show Driver Function
+~~~~~~~~~~~~~~~~~~~~
+
+Show persisted driver configurable and its related configs used in creating job objects
+
+This function does not take any extra arguments. There is only one registered driver in sqoop
+
+Example: ::
+
+  show driver
+
+Show Link Function
+~~~~~~~~~~~~~~~~~~
+
+Show persisted link objects.
+
++-----------------------+------------------------------------------------------+
+| Argument              |  Description                                         |
++=======================+======================================================+
+| ``-a``, ``--all``     | Show all available links                             |
++-----------------------+------------------------------------------------------+
+| ``-x``, ``--lid <x>`` | Show link with id ``<x>``                            |
++-----------------------+------------------------------------------------------+
+
+Example: ::
+
+  show link --all or show link
+
+Show Job Function
+~~~~~~~~~~~~~~~~~
+
+Show persisted job objects.
+
++-----------------------+----------------------------------------------+
+| Argument              |  Description                                 |
++=======================+==============================================+
+| ``-a``, ``--all``     | Show all available jobs                      |
++-----------------------+----------------------------------------------+
+| ``-j``, ``--jid <x>`` | Show job with id ``<x>``                     |
++-----------------------+----------------------------------------------+
+
+Example: ::
+
+  show job --all or show job
+
+Show Submission Function
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+Show persisted job submission objects.
+
++-----------------------+---------------------------------------------+
+| Argument              |  Description                                |
++=======================+=============================================+
+| ``-j``, ``--jid <x>`` | Show available submissions for given job    |
++-----------------------+---------------------------------------------+
+| ``-d``, ``--detail``  | Show job submissions in full details        |
++-----------------------+---------------------------------------------+
+
+Example: ::
+
+  show submission
+  show submission --jid 1
+  show submission --jid 1 --detail
+
+Create Command
+--------------
+
+Creates new link and job objects. This command is supported only in interactive mode. It will ask user to enter the link config and job configs for from /to and driver when creating link and job objects respectively.
+
+Available functions:
+
++----------------+-------------------------------------------------+
+| Function       | Description                                     |
++================+=================================================+
+| ``link``       | Create new link object                          |
++----------------+-------------------------------------------------+
+| ``job``        | Create new job object                           |
++----------------+-------------------------------------------------+
+
+Create Link Function
+~~~~~~~~~~~~~~~~~~~~
+
+Create new link object.
+
++------------------------+-------------------------------------------------------------+
+| Argument               |  Description                                                |
++========================+=============================================================+
+| ``-c``, ``--cid <x>``  |  Create new link object for connector with id ``<x>``       |
++------------------------+-------------------------------------------------------------+
+
+
+Example: ::
+
+  create link --cid 1 or create link -c 1
+
+Create Job Function
+~~~~~~~~~~~~~~~~~~~
+
+Create new job object.
+
++------------------------+------------------------------------------------------------------+
+| Argument               |  Description                                                     |
++========================+==================================================================+
+| ``-f``, ``--from <x>`` | Create new job object with a FROM link with id ``<x>``           |
++------------------------+------------------------------------------------------------------+
+| ``-t``, ``--to <x>``   | Create new job object with a TO link with id ``<x>``             |
++------------------------+------------------------------------------------------------------+
+
+Example: ::
+
+  create job --from 1 --to 2 or create job -f 1 -t 2
+
+Update Command
+--------------
+
+Update commands allows you to edit link and job objects. This command is supported only in interactive mode.
+
+Update Link Function
+~~~~~~~~~~~~~~~~~~~~
+
+Update existing link object.
+
++-----------------------+---------------------------------------------+
+| Argument              |  Description                                |
++=======================+=============================================+
+| ``-x``, ``--lid <x>`` |  Update existing link with id ``<x>``       |
++-----------------------+---------------------------------------------+
+
+Example: ::
+
+  update link --lid 1
+
+Update Job Function
+~~~~~~~~~~~~~~~~~~~
+
+Update existing job object.
+
++-----------------------+--------------------------------------------+
+| Argument              |  Description                               |
++=======================+============================================+
+| ``-j``, ``--jid <x>`` | Update existing job object with id ``<x>`` |
++-----------------------+--------------------------------------------+
+
+Example: ::
+
+  update job --jid 1
+
+
+Delete Command
+--------------
+
+Deletes link and job objects from Sqoop server.
+
+Delete Link Function
+~~~~~~~~~~~~~~~~~~~~
+
+Delete existing link object.
+
++-----------------------+-------------------------------------------+
+| Argument              |  Description                              |
++=======================+===========================================+
+| ``-x``, ``--lid <x>`` |  Delete link object with id ``<x>``       |
++-----------------------+-------------------------------------------+
+
+Example: ::
+
+  delete link --lid 1
+
+
+Delete Job Function
+~~~~~~~~~~~~~~~~~~~
+
+Delete existing job object.
+
++-----------------------+------------------------------------------+
+| Argument              |  Description                             |
++=======================+==========================================+
+| ``-j``, ``--jid <x>`` | Delete job object with id ``<x>``        |
++-----------------------+------------------------------------------+
+
+Example: ::
+
+  delete job --jid 1
+
+
+Clone Command
+-------------
+
+The clone command will load an existing link or job object from the Sqoop server and allow the user to make in-place updates that will result in the creation of a new link or job object. This command is not supported in batch mode.
+
+Clone Link Function
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Clone existing link object.
+
++-----------------------+------------------------------------------+
+| Argument              |  Description                             |
++=======================+==========================================+
+| ``-x``, ``--lid <x>`` |  Clone link object with id ``<x>``       |
++-----------------------+------------------------------------------+
+
+Example: ::
+
+  clone link --lid 1
+
+
+Clone Job Function
+~~~~~~~~~~~~~~~~~~
+
+Clone existing job object.
+
++-----------------------+------------------------------------------+
+| Argument              |  Description                             |
++=======================+==========================================+
+| ``-j``, ``--jid <x>`` | Clone job object with id ``<x>``         |
++-----------------------+------------------------------------------+
+
+Example: ::
+
+  clone job --jid 1
+
+Start Command
+-------------
+
+Start command will begin execution of an existing Sqoop job.
+
+Start Job Function
+~~~~~~~~~~~~~~~~~~
+
+Start job (submit new submission). Starting already running job is considered as invalid operation.
+
++----------------------------+----------------------------+
+| Argument                   |  Description               |
++============================+============================+
+| ``-j``, ``--jid <x>``      | Start job with id ``<x>``  |
++----------------------------+----------------------------+
+| ``-s``, ``--synchronous``  | Synchronous job execution  |
++----------------------------+----------------------------+
+
+Example: ::
+
+  start job --jid 1
+  start job --jid 1 --synchronous
+
+Stop Command
+------------
+
+The stop command will interrupt a job execution.
+
+Stop Job Function
+~~~~~~~~~~~~~~~~~
+
+Interrupt running job.
+
++-----------------------+------------------------------------------+
+| Argument              |  Description                             |
++=======================+==========================================+
+| ``-j``, ``--jid <x>`` | Interrupt running job with id ``<x>``    |
++-----------------------+------------------------------------------+
+
+Example: ::
+
+  stop job --jid 1
+
+Status Command
+--------------
+
+Status command will retrieve the last status of a job.
+
+Status Job Function
+~~~~~~~~~~~~~~~~~~~
+
+Retrieve last status for given job.
+
++-----------------------+------------------------------------------+
+| Argument              |  Description                             |
++=======================+==========================================+
+| ``-j``, ``--jid <x>`` | Retrieve status for job with id ``<x>``  |
++-----------------------+------------------------------------------+
+
+Example: ::
+
+  status job --jid 1
\ No newline at end of file

Added: websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/ConnectorDevelopment.rst
==============================================================================
--- websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/ConnectorDevelopment.rst (added)
+++ websites/staging/sqoop/trunk/content/docs/1.99.4/src/site/sphinx/ConnectorDevelopment.rst Tue Nov 25 21:57:10 2014
@@ -0,0 +1,456 @@
+.. Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+=============================
+Sqoop 2 Connector Development
+=============================
+
+This document describes how to implement a connector in Sqoop 2 using the code sample from one of the built-in connectors ( ``GenericJdbcConnector`` ) as a reference. Sqoop 2 jobs support extraction from and/or loading to different data sources. Sqoop 2 connectors encapsulate the job lifecycle operations for extracting and/or loading data from and/or to
+different data sources. Each connector will primarily focus on a particular data source and its custom implementation for optimally reading and/or writing data in a distributed environment.
+
+.. contents::
+
+What is a Sqoop Connector?
+++++++++++++++++++++++++++
+
+Connectors provide the facility to interact with many data sources and thus can be used as a means to transfer data between them in Sqoop. The connector implementation will provide logic to read from and/or write to a data source that it represents. For instance the ( ``GenericJdbcConnector`` ) encapsulates the logic to read from and/or write to jdbc enabled relational data sources. The connector part that enables reading from a data source and transferring this data to internal Sqoop format is called the FROM and the part that enables writing data to a data source by transferring data from Sqoop format is called TO. In order to interact with these data sources, the connector will provide one or many config classes and input fields within it.
+
+Broadly we support two main config types for connectors, link type represented by the enum ``ConfigType.LINK`` and job type represented by the enum ``ConfigType.JOB``. Link config represents the properties to physically connect to the data source. Job config represents the properties that are required to invoke reading from and/or writing to a particular dataset in the data source it connects to. If a connector supports both reading from and writing to, it will provide the ``FromJobConfig`` and ``ToJobConfig`` objects. Each of these config objects is custom to each connector and can have one or more inputs associated with each of the Link, FromJob and ToJob config types. Hence we call the connectors configurables, i.e. entities that can provide configs for interacting with the data source they represent. As the connectors evolve over time to support new features in their data sources, the configs and inputs will change as well. Thus the connector API also provides methods for upgrading the config and input names and data related to these data sources across different versions.
+
+The connectors implement logic for various stages of the extract/load process using the connector API described below. While extracting/reading data from the data-source the main stages are ``Initializer``, ``Partitioner``, ``Extractor`` and ``Destroyer``. While loading/writing data to the data source the main stages currently supported are ``Initializer``, ``Loader`` and ``Destroyer``. Each stage has its unique set of responsibilities that are explained in detail below. Since connectors understand the internals of the data source they represent, they work in tandem with the sqoop supported execution engines such as MapReduce or Spark (in future) to accomplish this process in a most optimal way.
+
+When do we add a new connector?
+===============================
+You add a new connector when you need to extract/read data from a new data source, or load/write
+data into a new data source that is not supported yet in Sqoop 2.
+In addition to the connector API, Sqoop 2 also has a submission and execution engine interface.
+At the moment the only supported engine is MapReduce, but we may support additional engines in the future such as Spark. Since many parallel execution engines are capable of reading/writing data, there may be a question of whether adding support for a new data source should be done through the connector or the execution engine API.
+
+**Our guidelines are as follows:** Connectors should manage all data extract(reading) from and/or load(writing) into a data source. Submission and execution engine together manage the job submission and execution life cycle to read/write data from/to data sources in the most optimal way possible. If you need to support a new data store and details of linking to it and don't care how the process of reading/writing from/to happens then you are looking to add a connector and you should continue reading the below Connector API details to contribute new connectors to Sqoop 2.
+
+
+Connector Implementation
+++++++++++++++++++++++++
+
+The ``SqoopConnector`` class defines an API for the connectors that must be implemented by the connector developers. Each Connector must extend ``SqoopConnector`` and override the methods shown below.
+::
+
+  public abstract String getVersion();
+  public abstract ResourceBundle getBundle(Locale locale);
+  public abstract Class getLinkConfigurationClass();
+  public abstract Class getJobConfigurationClass(Direction direction);
+  public abstract From getFrom();
+  public abstract To getTo();
+  public abstract ConnectorConfigurableUpgrader getConfigurableUpgrader()
+
+Connectors can optionally override the following methods:
+::
+
+  public List<Direction> getSupportedDirections();
+  public Class<? extends IntermediateDataFormat<?>> getIntermediateDataFormat()
+
+
+The ``getFrom`` method returns From_ instance
+which is a ``Transferable`` entity that encapsulates the operations
+needed to read from the data source that the connector represents.
+
+The ``getTo`` method returns To_ instance
+which is a ``Transferable`` entity that encapsulates the operations
+needed to write to the data source that the connector represents.
+
+Methods such as ``getBundle`` , ``getLinkConfigurationClass`` , ``getJobConfigurationClass``
+are related to `Configurations`_
+
+Since a connector represents a data source and it can support one of the two directions, either reading FROM its data source or writing to its data source or both, the ``getSupportedDirections`` method returns a list of directions that a connector will implement. This should be a subset of the values in the ``Direction`` enum we provide:
+::
+
+  public List<Direction> getSupportedDirections() {
+      return Arrays.asList(new Direction[]{
+          Direction.FROM,
+          Direction.TO
+      });
+  }
+
+
+From
+====
+
+The ``getFrom`` method returns From_ instance which is a ``Transferable`` entity that encapsulates the operations needed to read from the data source the connector represents. The built-in ``GenericJdbcConnector`` defines ``From`` like this.
+::
+
+  private static final From FROM = new From(
+        GenericJdbcFromInitializer.class,
+        GenericJdbcPartitioner.class,
+        GenericJdbcExtractor.class,
+        GenericJdbcFromDestroyer.class);
+  ...
+
+  @Override
+  public From getFrom() {
+    return FROM;
+  }
+
+Initializer and Destroyer
+-------------------------
+.. _Initializer:
+.. _Destroyer:
+
+Initializer is instantiated before the submission of the sqoop job to the execution engine and performs preparations such as connecting to the data source, creating temporary tables or adding dependent jar files. Initializers are executed as the first step in the sqoop job lifecycle. Here is the ``Initializer`` API.
+::
+
+  public abstract void initialize(InitializerContext context, LinkConfiguration linkConfiguration,
+      JobConfiguration jobConfiguration);
+
+  public List<String> getJars(InitializerContext context, LinkConfiguration linkConfiguration,
+      JobConfiguration jobConfiguration);
+
+  public abstract Schema getSchema(InitializerContext context, LinkConfiguration linkConfiguration,
+      JobConfiguration jobConfiguration);
+
+In addition to the initialize() method where the job execution preparation activities occur, the ``Initializer`` must also implement the getSchema() method for the direction it supports. The getSchema() method is used by the sqoop system to match the data extracted/read by the ``From`` instance of connector data source with the data loaded/written to the ``To`` instance of the connector data source. In case of a relational database or columnar database, the returned Schema object will include collection of columns with their data types. If the data source is schema-less, such as a file, an empty Schema can be returned (i.e a Schema object without any columns).
+
+NOTE: Sqoop 2 currently does not support extract and load between two connectors that represent schema-less data sources. We expect that at least the ``From`` instance of the connector or the ``To`` instance of the connector in the sqoop job will have a schema. If both ``From`` and ``To`` have an associated non empty schema, Sqoop 2 will load data by column name, i.e, data in column "A" in ``From`` instance of the connector for the job will be loaded to column "A" in the ``To`` instance of the connector for that job.
+
+
+``Destroyer`` is instantiated after the execution engine finishes its processing. It is the last step in the sqoop job lifecycle, so pending clean up tasks such as dropping temporary tables and closing connections are performed here. The term destroyer is a little misleading. It represents the phase where the final output commits to the data source can also happen in case of the ``TO`` instance of the connector code.
+
+Partitioner
+-----------
+
+The ``Partitioner`` creates ``Partition`` instances ranging from 1..N. The N is driven by a configuration as well. The default set of partitions created is set to 10 in the sqoop code. Here is the ``Partitioner`` API
+
+``Partitioner`` must implement the ``getPartitions`` method in the ``Partitioner`` API.
+
+::
+
+  public abstract List<Partition> getPartitions(PartitionerContext context,
+      LinkConfiguration linkConfiguration, FromJobConfiguration jobConfiguration);
+
+``Partition`` instances are passed to Extractor_ as the argument of ``extract`` method.
+Extractor_ determines which portion of the data to extract by a given partition.
+
+There is no actual convention for Partition classes other than being actually ``Writable`` and ``toString()`` -able. Here is the ``Partition`` API
+::
+
+  public abstract class Partition {
+    public abstract void readFields(DataInput in) throws IOException;
+    public abstract void write(DataOutput out) throws IOException;
+    public abstract String toString();
+  }
+
+Connectors can implement custom ``Partition`` classes. ``GenericJdbcPartitioner`` is one such example. It returns the ``GenericJdbcPartition`` objects.
+
+Extractor
+---------
+
+Extractor (E for ETL) extracts data from a given data source
+``Extractor`` must implement the ``extract`` method in the ``Extractor`` API.
+::
+
+  public abstract void extract(ExtractorContext context,
+                               LinkConfiguration linkConfiguration,
+                               JobConfiguration jobConfiguration,
+                               SqoopPartition partition);
+
+The ``extract`` method extracts data from the data source using the link and job configuration properties and writes it to the ``DataWriter`` (provided by the extractor context) as the default `Intermediate representation`_ .
+
+Extractors use Writers provided by the ExtractorContext to send a record through the sqoop system.
+::
+
+  context.getDataWriter().writeArrayRecord(array);
+
+The extractor must iterate through the given partition in the ``extract`` method.
+::
+
+  while (resultSet.next()) {
+    ...
+    context.getDataWriter().writeArrayRecord(array);
+    ...
+  }
+
+
+To
+==
+
+The ``getTo`` method returns a ``To`` instance which is a ``Transferable`` entity that encapsulates the operations needed to write data to the data source the connector represents. The built-in ``GenericJdbcConnector`` defines ``To`` like this.
+::
+
+  private static final To TO = new To(
+        GenericJdbcToInitializer.class,
+        GenericJdbcLoader.class,
+        GenericJdbcToDestroyer.class);
+  ...
+
+  @Override
+  public To getTo() {
+    return TO;
+  }
+
+
+Initializer and Destroyer
+-------------------------
+
+Initializer_ and Destroyer_ of a ``To`` instance are used in a similar way to those of a ``From`` instance.
+Refer to the previous section for more details.
+
+
+Loader
+------
+
+A loader (L for ETL) receives data from the ``From`` instance of the sqoop connector associated with the sqoop job and then loads it to a ``To`` instance of the connector associated with the same sqoop job.
+
+``Loader`` must implement ``load`` method of the ``Loader`` API
+::
+
+  public abstract void load(LoaderContext context,
+                            ConnectionConfiguration connectionConfiguration,
+                            JobConfiguration jobConfiguration) throws Exception;
+
+The ``load`` method reads data from ``DataReader`` (provided by context) in the default `Intermediate representation`_ and loads it to data source.
+
+Loader must iterate in the ``load`` method until the data from ``DataReader`` is exhausted.
+::
+
+  while ((array = context.getDataReader().readArrayRecord()) != null) {
+    ...
+  }
+
+NOTE: we do not yet support a stage for connector developers to control how to balance the loading/writing of data across the multiple loaders. In future we may be adding this to the connector API to have custom logic to balance the loading across multiple reducers.
+
+Configurables
++++++++++++++
+
+Configurable registration
+=========================
+Connectors are one of the configurables currently supported in Sqoop. Sqoop 2 registers definitions of connectors from the file named ``sqoopconnector.properties`` which each connector implementation should provide to become available in Sqoop.
+::
+
+  # Generic JDBC Connector Properties
+  org.apache.sqoop.connector.class = org.apache.sqoop.connector.jdbc.GenericJdbcConnector
+  org.apache.sqoop.connector.name = generic-jdbc-connector
+
+
+Configurations
+==============
+
+Implementations of ``SqoopConnector`` override methods such as ``getLinkConfigurationClass`` and ``getJobConfigurationClass``, returning a configuration class.
+::
+
+  @Override
+  public Class getLinkConfigurationClass() {
+    return LinkConfiguration.class;
+  }
+
+  @Override
+  public Class getJobConfigurationClass(Direction direction) {
+    switch (direction) {
+      case FROM:
+        return FromJobConfiguration.class;
+      case TO:
+        return ToJobConfiguration.class;
+      default:
+        return null;
+    }
+  }
+
+Configurations are represented by annotations defined in ``org.apache.sqoop.model`` package.
+Annotations such as ``ConfigurationClass`` , ``ConfigClass`` , ``Config`` and ``Input``
+are provided for defining configuration objects for each connector.
+
+``@ConfigurationClass`` is a marker annotation for ``ConfigurationClasses`` that hold a group or list of ``ConfigClasses`` annotated with the marker ``@ConfigClass``
+::
+
+  @ConfigurationClass
+  public class LinkConfiguration {
+
+    @Config public LinkConfig linkConfig;
+
+    public LinkConfiguration() {
+      linkConfig = new LinkConfig();
+    }
+  }
+
+Each ``ConfigClass`` defines the different inputs it exposes for the link and job configs. These inputs are annotated with ``@Input`` and the user will be asked to fill in when they create a sqoop job and choose to use this instance of the connector for either the ``From`` or ``To`` part of the job.
+
+::
+
+    @ConfigClass(validators = {@Validator(LinkConfig.ConfigValidator.class)})
+    public class LinkConfig {
+      @Input(size = 128, validators = {@Validator(NotEmpty.class), @Validator(ClassAvailable.class)} )
+      public String jdbcDriver;
+      @Input(size = 128) public String connectionString;
+      @Input(size = 40)  public String username;
+      @Input(size = 40, sensitive = true) public String password;
+      @Input public Map<String, String> jdbcProperties;
+    }
+
+Each ``ConfigClass`` and the inputs within the configs annotated with ``Input`` can specify validators via the ``@Validator`` annotation described below.
+
+Empty Configuration
+-------------------
+If a connector does not have any configuration inputs to specify for the ``ConfigType.LINK`` or ``ConfigType.JOB`` it is recommended to return the ``EmptyConfiguration`` class in the ``getLinkConfigurationClass()`` or ``getJobConfigurationClass(..)`` methods.
+::
+
+   @ConfigurationClass
+   public class EmptyConfiguration { }
+
+
+Configuration ResourceBundle
+============================
+
+The config and its corresponding input names, the input field description are represented in the config resource bundle defined per connector.
+::
+
+  # jdbc driver
+  connection.jdbcDriver.label = JDBC Driver Class
+  connection.jdbcDriver.help = Enter the fully qualified class name of the JDBC \
+                     driver that will be used for establishing this connection.
+
+  # connect string
+  connection.connectionString.label = JDBC Connection String
+  connection.connectionString.help = Enter the value of JDBC connection string to be \
+                     used by this connector for creating connections.
+
+  ...
+
+Those resources are loaded by the ``getBundle`` method of the ``SqoopConnector``.
+::
+
+  @Override
+  public ResourceBundle getBundle(Locale locale) {
+    return ResourceBundle.getBundle(
+    GenericJdbcConnectorConstants.RESOURCE_BUNDLE_NAME, locale);
+  }
+
+
+Validations for Configs and Inputs
+==================================
+
+Validators validate the config objects and the inputs associated with the config objects. For config objects themselves we encourage developers to write custom validators for both the link and job config types.
+
+::
+
+   @Input(size = 128, validators = {@Validator(value = StartsWith.class, strArg = "jdbc:")} )
+
+   @Input(size = 255, validators = { @Validator(NotEmpty.class) })
+
+Sqoop 2 provides a list of standard input validators that can be used by different connectors for the link and job type configuration inputs.
+
+::
+
+    public class NotEmpty extends AbstractValidator<String> {
+    @Override
+    public void validate(String instance) {
+      if (instance == null || instance.isEmpty()) {
+       addMessage(Status.ERROR, "Can't be null nor empty");
+      }
+     }
+    }
+
+The validation logic is executed when users, while creating sqoop jobs, input values for the link and job configs associated with the ``From`` and ``To`` instances of the connectors associated with the job.
+
+
+Sqoop 2 MapReduce Job Execution Lifecycle with Connector API
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+Sqoop 2 provides MapReduce utilities such as ``SqoopMapper`` and ``SqoopReducer`` that aid sqoop job execution.
+
+Note: Any class prefixed with Sqoop is an internal sqoop class provided for MapReduce and is not part of the connector API. These internal classes work with the custom implementations of ``Extractor``, ``Partitioner`` in the ``From`` instance and ``Loader`` in the ``To`` instance of the connector.
+
+When reading from a data source, the ``Extractor`` provided by the ``From`` instance of the connector extracts data from a corresponding data source it represents and the ``Loader``, provided by the TO instance of the connector, loads data into the data source it represents.
+
+The diagram below describes the initialization phase of a job.
+``SqoopInputFormat`` create splits using ``Partitioner``.
+::
+
+      ,----------------.          ,-----------.
+      |SqoopInputFormat|          |Partitioner|
+      `-------+--------'          `-----+-----'
+   getSplits  |                         |
+  ----------->|                         |
+              |      getPartitions      |
+              |------------------------>|
+              |                         |         ,---------.
+              |                         |-------> |Partition|
+              |                         |         `----+----'
+              |<- - - - - - - - - - - - |              |
+              |                         |              |          ,----------.
+              |-------------------------------------------------->|SqoopSplit|
+              |                         |              |          `----+-----'
+
+The diagram below describes the map phase of a job.
+``SqoopMapper`` invokes ``From`` connector's extractor's ``extract`` method.
+::
+
+      ,-----------.
+      |SqoopMapper|
+      `-----+-----'
+     run    |
+  --------->|                                   ,------------------.
+            |---------------------------------->|SqoopMapDataWriter|
+            |                                   `------+-----------'
+            |                ,---------.               |
+            |--------------> |Extractor|               |
+            |                `----+----'               |
+            |      extract        |                    |
+            |-------------------->|                    |
+            |                     |                    |
+           read from DB           |                    |
+  <-------------------------------|      write*        |
+            |                     |------------------->|
+            |                     |                    |           ,----.
+            |                     |                    |---------->|Data|
+            |                     |                    |           `-+--'
+            |                     |                    |
+            |                     |                    |      context.write
+            |                     |                    |-------------------------->
+
+The diagram below describes the reduce phase of a job.
+``OutputFormat`` invokes ``To`` connector's loader's ``load`` method (via ``SqoopOutputFormatLoadExecutor`` ).
+::
+
+    ,------------.  ,---------------------.
+    |SqoopReducer|  |SqoopNullOutputFormat|
+    `---+--------'  `----------+----------'
+        |                 |   ,-----------------------------.
+        |                 |-> |SqoopOutputFormatLoadExecutor|
+        |                 |   `--------------+--------------'        ,----.
+        |                 |                  |---------------------> |Data|
+        |                 |                  |                       `-+--'
+        |                 |                  |   ,-----------------.   |
+        |                 |                  |-> |SqoopRecordWriter|   |
+      getRecordWriter     |                  |   `--------+--------'   |
+  ----------------------->| getRecordWriter  |            |            |
+        |                 |----------------->|            |            |     ,--------------.
+        |                 |                  |-----------------------------> |ConsumerThread|
+        |                 |                  |            |            |     `------+-------'
+        |                 |<- - - - - - - - -|            |            |            |    ,------.
+  <- - - - - - - - - - - -|                  |            |            |            |--->|Loader|
+        |                 |                  |            |            |            |    `--+---'
+        |                 |                  |            |            |            |       |
+        |                 |                  |            |            |            | load  |
+   run  |                 |                  |            |            |            |------>|
+  ----->|                 |     write        |            |            |            |       |
+        |------------------------------------------------>| setContent |            | read* |
+        |                 |                  |            |----------->| getContent |<------|
+        |                 |                  |            |            |<-----------|       |
+        |                 |                  |            |            |            | - - ->|
+        |                 |                  |            |            |            |       | write into DB
+        |                 |                  |            |            |            |       |-------------->
+
+
+
+.. _`Intermediate representation`: https://cwiki.apache.org/confluence/display/SQOOP/Sqoop2+Intermediate+representation



Mime
View raw message