UPGRADE YOUR BROWSER

We have detected your current browser version is not the latest one. Xilinx.com uses the latest web technologies to bring you the best online experience possible. Please upgrade to a Xilinx.com supported browser:Chrome, Firefox, Internet Explorer 11, Safari. Thank you!

cancel
Showing results for 
Search instead for 
Did you mean: 
Participant aichamb
Participant
2,765 Views
Registered: ‎10-03-2017

error: expected identifier before '-' token

Hello, I am trying to simulate my C++ code in Vivado HLS, and I am running into a lot of problems.

#define DB_DEBUG

template <typename T, int DIM1, int DIM2, int DIMa, int DIMb, int DIMc, int DIMd, int DIMe, int DIMf>

void pavilion_hw(T LabelS1[DIM1][DIM2], T  predict_label1[DIMa][DIMb], T  precision1[DIMc][DIMd],T  prob_estimates_t1[DIMe][DIMf])
{
	int SStest1;
	// partition with half dimension size b/c BRAM has two ports
	 SStest1= [-0.674090537952796	-0.967913560624215	-0.946607312926616	-0.949789832193271	-0.955656497278097	-0.938898706383114	-0.961972723505885	-0.943158564395126	-0.924258334652885	-0.987130021868515	-0.961224433510270	-0.979509038935985	-0.982841580894078	-0.968786461033008
-0.661420074510186	-0.953582957005305	-0.976667803306075	-0.852024795662031	-0.923684241437988	-0.908602156779947	-0.961480830721390	-0.943819721336063	-0.961645084746726	-0.975901524455997	-0.964617784738062	-0.985199969033792	-0.979709442173101	-0.949066533256159
-0.796228139367668	-0.761729500498894	-0.922254078572498	-0.835053436355116	-0.927209479313798	-0.955333905733235	-0.924794123637238	-0.959289492589635	-0.930944373099572	-0.938536988120820	-0.976986907828858	-0.949164123093526	-0.970737156105474	-0.947698751805312
-0.760572782759851	-0.831771980500142	-0.945585944378708	-0.908125723416015	-0.942297850129234	-0.938465946137737	-0.968103420998903	-0.977193895073700	-0.969825462569338	-0.981781833938995	-0.975225320029628	-0.966562487844290	-0.979385622636404	-0.969916779098183
-0.682062208785592	-0.901993333331962	-0.911439298246093	-0.995583382410982	-0.950515197199961	-0.939916724225237	-0.951278160425057	-0.951056228844938	-0.962461853366129	-0.977169060764356	-0.975068659001004	-1	-0.981383502203534	-0.960140644534363
-0.764153524047345	-0.782068868778498	-0.930585958249904	-0.899522666144618	-0.961023627518624	-0.905380199817116	-0.931424494746754	-0.911851616413864	-0.955101420749475	-0.978391738284877	-0.977733347990399	-0.957980914548860	-0.987159688960536	-0.958296510208874
-0.741980614232449	-0.708911988880729	-0.854515153854522	-0.844481728371788	-0.939444864580808	-0.921581100661191	-0.931215822177604	-0.924511727085001	-0.934842760732822	-0.944799003697996	-0.949046456708687	-0.969530645790348	-0.981557594662755	-0.966214670867448
-0.373792542593349	-0.520278392032748	-0.703829741405612	-0.811632538345339	-0.843860131190612	-0.867918184900834	-0.861425053147186	-0.921678622630629	-0.848207919776101	-0.952149873411277	-0.933584801186523	-0.956270474264593	-0.972592914683167	-0.953500986248213
-0.661229877813482	-0.663476191336492	-0.875946105955396	-0.813599831284167	-0.927616269612180	-0.947770900356774	-0.944605151964964	-0.949965011099178	-0.951558296918110	-0.959706992151603	-0.934147669647518	-0.959913517244191	-0.976789712492956	-0.957643754265247
-0.518554977026420	-0.602825724193184	-0.883181911007378	-0.866976656576412	-0.881653303448664	-0.834429119509439	-0.938506509068625	-0.792206968065827	-0.884123330616924	-0.917397025318987	-0.938798625979835	-0.925986787255497	-0.986438030341222	-0.961106108508390
-0.530140301878371	-0.883934580851869	-0.929208213377834	-0.981337861458516	-0.942509396789582	-0.918370801892608	-0.946368904151541	-0.968727717519249	-0.965415345904235	-0.992420840751352	-0.960290441628204	-0.987263434783806	-0.980777852550863	-0.961027269770681
-0.614065935999483	-0.643092868497660	-0.849915771303324	-0.923684372369931	-0.899325977629591	-0.940079152274114	-0.943645330258078	-0.966640084204524	-0.954120749867146	-0.992492691029133	-0.942907389517172	-0.986160598733704	-0.961571311375914	-0.944437607583134
-0.586207974039630	-0.655562550483140	-0.848822046603218	-0.828032863687348	-0.979545766504300	-0.954849783433112	-0.933180508531547	-0.901952187183437	-0.935767611336519	-0.947717892133932	-0.963983017763248	-0.936129485635805	-0.962853677964312	-0.939892729210301
-0.698562423991206	-0.828928876308795	-0.930529603172751	-0.973583105511949	-0.922780081115147	-0.950411689148607	-0.922089545604029	-0.902482494918462	-0.919077025971580	-0.938762961947185	-0.946683851734027	-0.943899220923818	-0.970751027958274	-0.962890874229321
-0.792352413710960	-0.846807561570503	-0.962463796461496	-0.938280210895608	-0.970676035685350	-0.950577966455638	-0.982653505409229	-0.964264032678215	-0.961530079570332	-0.984175640092749	-0.990259222509500	-0.957261271696928	-0.982531823910805	-0.975962025172037
-0.794841618144870	-0.829566960381545	-0.965210501288073	-0.932270663074643	-0.967556626906079	-0.946620792181095	-0.972488769216231	-0.962127134647906	-0.957393614634659	-0.981727556097064	-0.987503639619946	-0.953217441593240	-0.981375073603447	-0.973405792323495
-0.795020194095386	-0.806888733270744	-0.960369889000445	-0.915515767489692	-0.957828103336507	-0.939683270081732	-0.951942785566158	-0.957422184175696	-0.947345455495148	-0.975149131137862	-0.978221174145014	-0.944545398095925	-0.978853699190608	-0.968099161607423
-0.698839070385112	-0.854584769884228	-0.879448228263050	-0.898303458884004	-0.970834545509510	-0.997729382049760	-0.938912886553759	-0.968614408138659	-0.974484461898617	-0.985571704652593	-0.967328388395865	-0.972452715835938	-0.983660906019555	-0.963564537278949
-0.673302416738159	-0.879272351929407	-0.968223106643735	-0.972084171937861	-0.959896101364386	-0.961700573227355	-0.969790464510721	-0.954916739185306	-0.945985863949909	-0.975300691304921	-0.968364403832397	-0.977561535571141	-0.980589365529592	-0.954447517556441
-0.637045925923046	-0.710061083800907	-0.936826613792305	-0.859638718802183	-0.912322897550303	-0.981683944934587	-0.890113781116241	-0.913234535285272	-0.894395484221828	-0.922010414470385	-0.957815677576098	-0.941998783974119	-0.969077948566397	-0.955456380822368
-0.0464553899974760	-0.662925986991806	-0.833828153044336	-0.929089542429105	-0.837654390767010	-0.945247889228093	-0.957871446601941	-0.960270138903518	-0.965707812000886	-0.958450094695355	-0.968893174805499	-0.975892896458684	-0.882317284020699	-0.784076824928733
0.168450404584060	-0.438611610872580	-0.739305838353202	-0.753469051869701	-0.872053346050208	-0.902328666401733	-0.875296732403390	-0.891944157498292	-0.875131611862177	-0.876570070893988	-0.916492079359415	-0.892474870559755	-0.880173712101710	-0.818840278283343
0.214956796965889	-0.463100108992908	-0.795731268208176	-0.892232485332773	-0.796178277611433	-0.876064110052591	-0.917887824420658	-0.933514902536318	-0.906487752914575	-0.906161558178618	-0.963182144556198	-0.927398731670949	-0.880132272769419	-0.776628206791729
-0.0355148259693545	-0.103832635363957	-0.642826483479963	-0.675170193621003	-0.726663088806811	-0.743780040787059	-0.831794948409108	-0.812381272649148	-0.818233308995815	-0.936764748241986	-0.927791786953338	-0.946337910236447	-0.865122830148352	-0.803850525738294
-0.0375370422771995	-0.714446611429583	-0.817139932455997	-0.861863433965497	-0.801308409717709	-0.912671943149429	-0.902056680948667	-0.924358383893747	-0.930365403512554	-0.933505439717915	-0.907344668404472	-0.962516466348439	-0.888667439601752	-0.759379503762919
-0.0996422526625035	-0.569498829531724	-0.785394421709639	-0.775105653598853	-0.896482238814758	-0.792471179311656	-0.893821172610422	-0.919322542815255	-0.929090887568931	-0.947222968505341	-0.941521326987315	-0.939564066437628	-0.886581473928985	-0.854622228969523
-0.157947246037774	-0.746922906626790	-0.755491702134737	-0.884559880628624	-0.926822302167037	-0.892694279049245	-0.880997501268835	-0.960323416980551	-0.931254655744972	-0.929470986910981	-0.910748517978109	-0.956965233219925	-0.892806218692971	-0.824323609583577
0.0143100845225228	-0.296971046115063	-0.756037633627588	-0.763254487870567	-0.853796813421072	-0.903855283273275	-0.932880202365020	-0.920300138069262	-0.937750780107648	-0.931802164909103	-0.938127043278968	-0.936648786420969	-0.897396787074019	-0.841443684643823
0.126406089554415	-0.553436242221630	-0.878529200298623	-0.811481080953497	-0.858597903016569	-0.858187238247150	-0.929425420879273	-0.898856593831970	-0.954706925783214	-0.905919892252836	-0.955668439276137	-0.938058984422267	-0.859062344153360	-0.781233364834173
-0.126055537585803	-0.545552869682677	-0.731986017810449	-0.886554196997008	-0.843484118770908	-0.933727925913333	-0.929494256217181	-0.949857908445841	-0.933022166519305	-0.955371701911967	-0.945336532486260	-0.958137376491658	-0.907276661798547	-0.823729901317309
0.0145717851732077	-0.533090337648005	-0.839692084485777	-0.895485971838661	-0.928205663722173	-0.925860301792440	-0.929578060336248	-0.960552985114785	-0.949903531077579	-0.934632681448160	-0.968632836944857	-0.950189964823872	-0.879233119136866	-0.785002723370355
-0.178891784381271	-0.399532717894635	-0.669385253177598	-0.727133395461031	-0.751773861739512	-0.852940528963974	-0.888222736607932	-0.865518628517857	-0.904806207358640	-0.896746617989777	-0.933151592845165	-0.903824933251927	-0.892900090292400	-0.850301284686282
0.339342751347667	-0.168658832220281	-0.701126807517342	-0.705317306144173	-0.766727960954685	-0.805058424745359	-0.908411767678567	-0.850936664511331	-0.938429790986132	-0.897160305989233	-0.966173046779846	-0.901648919962718	-0.923592942739536	-0.812147836977765
-0.165273390468733	-0.202594882499135	-0.685271591575949	-0.641279617027603	-0.810903427104329	-0.721277680480091	-0.878931073742658	-0.770692553521093	-0.915436309034622	-0.845750173546187	-0.920759281404426	-0.849039794164389	-0.883592352877391	-0.786060658867459
-0.174398260956424	-0.628964902893240	-0.851345814848046	-0.929633727074964	-0.912288496191548	-0.952393561588859	-0.957556620703674	-0.966293462428000	-0.967979236795410	-0.967761734377952	-0.968514139800528	-0.966000184471383	-0.873900925904829	-0.804602844673793
0.0912987881509080	-0.562348503567656	-0.544977227135293	-0.496953610164791	-0.665280698891481	-0.825516237438271	-0.764714736496212	-0.825266477368957	-0.778124049781793	-0.889578890948835	-0.860510945808230	-0.889449590530369	-0.899510852733607	-0.817813916256618
-0.159278240258134	-0.562350349373472	-0.672010009362750	-0.702971077603283	-0.791579089506330	-0.773580188614902	-0.868797839368800	-0.901404564649279	-0.883359902797464	-0.882538284811406	-0.902504051837194	-0.888858485690590	-0.907802404057984	-0.860390254926969
-0.115571783442574	-0.364989767126751	-0.715693202021139	-0.720794054845607	-0.734368283253484	-0.821024371768553	-0.864273592837283	-0.884637763632675	-0.905128610959545	-0.911052662127927	-0.934668657036883	-0.959498348008793	-0.948659165025090	-0.857302848890063
-0.119022287702128	-0.435263305220441	-0.674278713268148	-0.717147325691312	-0.786355896498330	-0.803730904718690	-0.918533634832367	-0.871846660926452	-0.930289987345790	-0.945177344298286	-0.948345725996483	-0.900852637961356	-0.927660822404464	-0.829129813555718
-0.243582392888872	-0.456156540815826	-0.882025256314901	-0.878826799838538	-0.875702181196543	-0.952037299706992	-0.922147237385639	-0.953195599137228	-0.988774455168753	-0.956906724025646	-0.990803722658199	-0.962527534612586	-0.917165742920258	-0.857395436225837
0.399888144165083	0.187596602167405	-0.571206951482556	-0.630021295932159	-0.732918080832751	-0.782042578842550	-0.884501484522094	-0.862443525574920	-0.907782413800503	-0.879582251780713	-0.875536974905776	-0.912451877893607	-0.922455195733535	-0.902359364047133
0.556284386426115	0.314036089555063	-0.519960698533138	-0.646049325780951	-0.741233326065650	-0.712137290320001	-0.795024298435714	-0.727879833200862	-0.853933696711804	-0.789936654005493	-0.895739890390449	-0.834987182855880	-0.931084212245965	-0.785223030050557
0.144327924856347	-0.00175945025183155	-0.764123925530958	-0.788116589181975	-0.815023410153421	-0.765757055388867	-0.819440340310641	-0.832834974075190	-0.889065353427949	-0.879626121154264	-0.955003504819721	-0.886941582940570	-0.951457072822665	-0.838292308039416
0.423517043395152	0.207103229391809	-0.537337037899986	-0.719082296323729	-0.730712289172267	-0.803970667494681	-0.776968241650566	-0.770243671002982	-0.870732435883177	-0.790184354464107	-0.849528556769068	-0.857417938148147	-0.914235935637580	-0.862839599386109
0.673400833910405	0.263687282928186	-0.468684958192597	-0.555004829882917	-0.597759466034923	-0.879943048249649	-0.759647494885421	-0.842498114781129	-0.795476315407791	-0.874613706944541	-0.825569610343889	-0.906032284945689	-0.959418030250979	-0.845363247476606
0.183271855620944	0.0211381514220783	-0.502683851137181	-0.636454841163429	-0.786437592022666	-0.805282205840683	-0.813868765137396	-0.807502478689115	-0.789136059855138	-0.787535977735564	-0.823588573107538	-0.833053461976117	-0.925414212506851	-0.879916140090418
0.295845138886807	-0.00395006804240183	-0.548896710154673	-0.673532260911150	-0.743809959983556	-0.766709087110313	-0.792579121413211	-0.812789970935862	-0.888809512385073	-0.865512371317576	-0.866483384552372	-0.910365914309237	-0.922859275559095	-0.805741791608307
0.382230322025496	-0.100544388585754	-0.288045792305930	-0.462878731933967	-0.733635653289924	-0.810774775566528	-0.784311635942323	-0.824278283823241	-0.837674642235770	-0.820294633519681	-0.930289366057359	-0.880333683094064	-0.914809932792345	-0.733615286641321
0.515086084868621	-0.0118463200641755	-0.681312282108688	-0.721728766135035	-0.896943193457623	-0.892492731672196	-0.900895034581712	-0.958202884169137	-0.890317876836114	-0.925894485203484	-0.887102514104963	-0.925150645540907	-0.951009285936294	-0.828633953324099
0.218939003616403	-0.219908089866804	-0.554543937993146	-0.697744593495449	-0.817644527591940	-0.815044931936388	-0.835611927342840	-0.863014952910201	-0.870015506961933	-0.924498109016286	-0.861738324469650	-0.888798266919434	-0.963003761418040	-0.752728018190114
0.379801716860667	0.0803934635748880	-0.348489273860588	-0.443364836987716	-0.686210071710231	-0.749012364568597	-0.868160795860281	-0.905931730794798	-0.827560530714372	-0.843130513494941	-0.874882850617447	-0.898079375435337	-0.958923408320119	-0.826200545619004
0.499909795739691	0.0624123264063490	-0.257162655088657	-0.474302401472459	-0.717398057495245	-0.792006768091720	-0.765814288840469	-0.795160131332406	-0.817978992797693	-0.871336925999567	-0.907076913192798	-0.851666096922091	-0.932313438565331	-0.778005802680850
0.269368264824844	0.0215786140194887	-0.476977706904142	-0.647680815543785	-0.700462589073132	-0.788128770460292	-0.835354852471012	-0.862240914525829	-0.860676230010876	-0.845117177334758	-0.912670838989494	-0.867405287746421	-0.973060275185485	-0.823505423592852
0.179006934566610	0.0585770863310706	-0.729516278647376	-0.736020711155540	-0.833340421570685	-0.863888272177564	-0.858242670121925	-0.908051906398737	-0.888570941463165	-0.900872325708206	-0.872080362074863	-0.886372519170446	-0.948853071328778	-0.841226487206630
0.255490817526987	0.0983271005342115	-0.506833291130341	-0.651349940443935	-0.756337911242788	-0.786870364249337	-0.799083430523079	-0.798476951216588	-0.930360503515967	-0.961047836742245	-0.950977568316625	-0.966198529200948	-0.944896563690946	-0.877086968426192
0.244696633497264	0.124742610239186	-0.00974949294211636	-0.527185835802341	-0.669863996792797	-0.660506846176490	-0.777882938739871	-0.741441779984189	-0.863181488070075	-0.798643774891844	-0.919225101825890	-0.838533769615936	-0.900125302373132	-0.798067298811813
0.154383004639343	-0.135587455441821	-0.474282157787074	-0.545456066636011	-0.647593724862076	-0.755085065020264	-0.761876996351167	-0.835705857964857	-0.887714636576169	-0.869061999970453	-0.882416185426200	-0.923607732584544	-0.959373168720827	-0.838276040824778
0.504849989457597	0.214254376963237	-0.486090671351638	-0.601583503221537	-0.696568826469689	-0.752801035520086	-0.811002254524322	-0.825807713677726	-0.833769846610573	-0.855476282988971	-0.883160491267595	-0.962853439329057	-0.902581371883451	-0.743974014417838
0.445145614467157	-0.0889240542286259	-0.251341749130795	-0.325574329282160	-0.661040242281816	-0.652164091731617	-0.813620922276726	-0.807485140580725	-0.856971911757818	-0.756592971519269	-0.864973897574650	-0.840655456848447	-0.920416268675231	-0.746278678613040
0.434793037813971	-0.0492388032158161	-0.365397844714972	-0.528993051702485	-0.685064834479619	-0.737413635953856	-0.799290876511762	-0.850728865215625	-0.825002878399477	-0.891118614677671	-0.870372516784281	-0.902812350877063	-0.922480285663246	-0.885180878798811]




field1 == "Parameters";   value1 = [1;1;1;10;0.150000000000000]
field2 == "nr_class" ;    value2 = [3]
field3 == "totalSV"  ;    value3 = [38]
field4 == "rho"       ;   value4 = [3.85250072056665;1.98260813354295;-9.85381777928213]
field5 == "Label"      ;  value5 = [1;2;3]
field6 == "sv_indices" ;  value6 = [1;3;11;18;20;22;25;26;29;30;42;43;45;49;50;51;55;56;60;63;65;69;70;71;72;73;75;82;83;90;94;98;102;104;107;110;113;119]
field7 == "ProbA"      ; value7 = [-3.04020468884611;-3.02985537320699;-1.22838128162279]
field8 == "ProbB"      ; value8 = [-0.143832034298135;-0.331592706939798;-0.317353460209991]
field9 == "nSV"         :value9 = [10;17;11]
field10 == "sv_coef"    :value10 = [0.0441014900184046,0;0.115481676752995,0.0247201450973238;0.0284525394158882,0;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0.0429276473187021,0.0247201450973238;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0,2.56566567302770;-0.115481676752995,0;-0.115481676752995,0;-0.115481676752995,0;-0.115481676752995,0;0,2.56566567302770;0,1.19304447300938;-0.115481676752995,0;0,1.37262120001832;0,2.56566567302770;0,2.56566567302770;-0.115481676752995,0;0,2.56566567302770;0,2.56566567302770;0,2.56566567302770;-0.115481676752995,0;-0.115481676752995,0;-0.0247201450973238,-2.56566567302770;-0.0247201450973238,-0.665006705657444;-0.0247201450973238,-2.56566567302770;0,-1.82845371567548;-0.0247201450973238,-2.56566567302770;0,-2.56566567302770;-0.0247201450973238,-2.56566567302770;-0.0247201450973238,-0.106867010440775;0,-1.24158009628657;-0.0247201450973238,-2.56566567302770;-0.0247201450973238,-1.28942381799513]
model1 = struct(field1,value1,field2,value2,field3,value3,field4,value4,field5,value5,field6,value6,field7,value7,field8,value8,field9,value9,field10,value10)


 [predict_label1,precision1,prob_estimates_t1]=svmpredict(LabelS1,SStest1, model1);

	
		return;
		}

the errors are :

 Compiling ../../../../main.cpp in debug mode
../../../../main.cpp: In function 'void pavilion_hw(T (*)[DIM2], T (*)[DIMb], T (*)[DIMd], T (*)[DIMf])':
../../../../main.cpp:122:13: error: expected identifier before '-' token
../../../../main.cpp: In lambda function:
../../../../main.cpp:186:1: error: expected '{' before 'field1'
../../../../main.cpp: In function 'void pavilion_hw(T (*)[DIM2], T (*)[DIMb], T (*)[DIMd], T (*)[DIMf])':
../../../../main.cpp:186:1: warning: lambda expressions only available with -std=c++0x or -std=gnu++0x [enabled by default]
../../../../main.cpp:186:1: error: expected ';' before 'field1'
../../../../main.cpp:186:27: error: 'value1' was not declared in this scope
../../../../main.cpp:186:37: error: expected identifier before numeric constant
../../../../main.cpp:186:38: error: expected ']' before ';' token
../../../../main.cpp: In lambda function:
sim1.PNG
0 Kudos
7 Replies
Scholar u4223374
Scholar
2,753 Views
Registered: ‎04-26-2015

Re: error: expected identifier before '-' token

Unfortunately the answer is "what you've written is nothing like valid C code". Possibly close to Python code, or Matlab?

 

For a start, SStest1 is defined as an integer, but then you're assigning an array to it. In C it would have to be defined as an array, and you can't assign a whole array in C anyway except when you're defining the variable (for later assignments, you have to either use memcpy or set each index individually). Further problems at this point are that C arrays are initialized with braces ("{}"), not square brackets, and C arrays need commas between the elements. From an entirely technical point of view, if you define this array as having integer type, then pretty much the whole thing will end up being rounded to either 0 or -1.

 

A bit lower down you have:

field1 == "Parameters"

This will (a) always return false because C will do an address comparison, and (b) have no effect because all it does is test for equality. Right after this you've got:

value1 = [1;1;1;10;0.150000000000000]

This is totally broken. As above, you can't assign a whole array at once, and arrays need braces. C arrays use commas between elements, not semicolons, and every C assignment needs a semicolon at the end.

 

Right towards the bottom, you have:

[predict_label1,precision1,prob_estimates_t1]=svmpredict...

Again, not valid C. C functions return either zero or one value. They do not return groups/tuples as is possible in Matlab/Python. I would suggest checking the definition of the "svmpredict" function that you're using.

0 Kudos
Participant aichamb
Participant
2,724 Views
Registered: ‎10-03-2017

Re: error: expected identifier before '-' token

 

Hello @u4223374,this is my svmpredict

 

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "svm.h"

#include "mex.h"
#include "svm_model_matlab.h"

#ifdef MX_API_VER
#if MX_API_VER < 0x07030000
typedef int mwIndex;
#endif
#endif

#define CMD_LEN 2048

// Sink for libsvm's diagnostic output when quiet mode (-q) is selected.
// Matches the printf-style signature of the `info` function pointer.
// Bug fix: the original body had no return statement in a non-void
// function (undefined behavior in C++ if the result is ever read).
// Returns 0, i.e. "zero characters printed".
int print_null(const char *s,...) { return 0; }
int (*info)(const char *fmt,...) = &mexPrintf;

void read_sparse_instance(const mxArray *prhs, int index, struct svm_node *x)
{
	int i, j, low, high;
	mwIndex *ir, *jc;
	double *samples;

	ir = mxGetIr(prhs);
	jc = mxGetJc(prhs);
	samples = mxGetPr(prhs);

	// each column is one instance
	j = 0;
	low = (int)jc[index], high = (int)jc[index+1];
	for(i=low;i<high;i++)
	{
		x[j].index = (int)ir[i] + 1;
		x[j].value = samples[i];
		j++;
	}
	x[j].index = -1;
}

// Fill every requested MATLAB output (plhs[0..nlhs-1]) with an empty 0x0
// double matrix so callers receive well-formed results on error paths.
static void fake_answer(int nlhs, mxArray *plhs[])
{
	for(int out = 0; out < nlhs; out++)
		plhs[out] = mxCreateDoubleMatrix(0, 0, mxREAL);
}

// Core prediction routine: runs `model` over every testing instance in
// prhs[1] and fills the MATLAB outputs.
//   plhs[0] - predicted label per instance (testing_instance_number x 1)
//   plhs[1] - [accuracy%; mean squared error; squared correlation coeff]
//   plhs[2] - probability estimates when predict_probability != 0,
//             otherwise raw decision values
// prhs[0] holds the true labels (one column), prhs[1] the instance matrix
// with one row per instance. `predict_probability` mirrors the '-b' flag.
void predict(int nlhs, mxArray *plhs[], const mxArray *prhs[], struct svm_model *model, const int predict_probability)
{
	int label_vector_row_num, label_vector_col_num;
	int feature_number, testing_instance_number;
	int instance_index;
	double *ptr_instance, *ptr_label, *ptr_predict_label; 
	double *ptr_prob_estimates, *ptr_dec_values, *ptr;
	struct svm_node *x;
	mxArray *pplhs[1]; // transposed instance sparse matrix
	mxArray *tplhs[3]; // temporary storage for plhs[]

	// Running statistics for the classification/regression summary.
	int correct = 0;
	int total = 0;
	double error = 0;
	double sump = 0, sumt = 0, sumpp = 0, sumtt = 0, sumpt = 0;

	int svm_type=svm_get_svm_type(model);
	int nr_class=svm_get_nr_class(model);
	double *prob_estimates=NULL;

	// prhs[1] = testing instance matrix
	feature_number = (int)mxGetN(prhs[1]);
	testing_instance_number = (int)mxGetM(prhs[1]);
	label_vector_row_num = (int)mxGetM(prhs[0]);
	label_vector_col_num = (int)mxGetN(prhs[0]);

	// Validate that the label argument is a column vector with one entry
	// per testing instance before touching any data.
	if(label_vector_row_num!=testing_instance_number)
	{
		mexPrintf("Length of label vector does not match # of instances.\n");
		fake_answer(nlhs, plhs);
		return;
	}
	if(label_vector_col_num!=1)
	{
		mexPrintf("label (1st argument) should be a vector (# of column is 1).\n");
		fake_answer(nlhs, plhs);
		return;
	}

	ptr_instance = mxGetPr(prhs[1]);
	ptr_label    = mxGetPr(prhs[0]);

	// transpose instance matrix
	if(mxIsSparse(prhs[1]))
	{
		if(model->param.kernel_type == PRECOMPUTED)
		{
			// precomputed kernel requires dense matrix, so we make one
			mxArray *rhs[1], *lhs[1];
			rhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, lhs, 1, rhs, "full"))
			{
				mexPrintf("Error: cannot full testing instance matrix\n");
				fake_answer(nlhs, plhs);
				return;
			}
			ptr_instance = mxGetPr(lhs[0]);
			mxDestroyArray(rhs[0]);
		}
		else
		{
			// Transpose via MATLAB so each instance becomes one column,
			// which read_sparse_instance() can then walk efficiently.
			mxArray *pprhs[1];
			pprhs[0] = mxDuplicateArray(prhs[1]);
			if(mexCallMATLAB(1, pplhs, 1, pprhs, "transpose"))
			{
				mexPrintf("Error: cannot transpose testing instance matrix\n");
				fake_answer(nlhs, plhs);
				return;
			}
		}
	}

	if(predict_probability)
	{
		// Regression models report a Laplace noise parameter instead of
		// per-class probabilities; only classifiers need the buffer below.
		if(svm_type==NU_SVR || svm_type==EPSILON_SVR)
			info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=%g\n",svm_get_svr_probability(model));
		else
			prob_estimates = (double *) malloc(nr_class*sizeof(double));
	}

	// Allocate the outputs into tplhs[] first; they are copied into plhs[]
	// at the end according to how many outputs the caller requested.
	tplhs[0] = mxCreateDoubleMatrix(testing_instance_number, 1, mxREAL);
	if(predict_probability)
	{
		// prob estimates are in plhs[2]
		if(svm_type==C_SVC || svm_type==NU_SVC)
			tplhs[2] = mxCreateDoubleMatrix(testing_instance_number, nr_class, mxREAL);
		else
			tplhs[2] = mxCreateDoubleMatrix(0, 0, mxREAL);
	}
	else
	{
		// decision values are in plhs[2]
		if(svm_type == ONE_CLASS ||
		   svm_type == EPSILON_SVR ||
		   svm_type == NU_SVR ||
		   nr_class == 1) // if only one class in training data, decision values are still returned.
			tplhs[2] = mxCreateDoubleMatrix(testing_instance_number, 1, mxREAL);
		else
			tplhs[2] = mxCreateDoubleMatrix(testing_instance_number, nr_class*(nr_class-1)/2, mxREAL);
	}

	ptr_predict_label = mxGetPr(tplhs[0]);
	// Note: both pointers alias tplhs[2]; only one is used per mode.
	ptr_prob_estimates = mxGetPr(tplhs[2]);
	ptr_dec_values = mxGetPr(tplhs[2]);
	// +1 node for the index == -1 terminator.
	x = (struct svm_node*)malloc((feature_number+1)*sizeof(struct svm_node) );
	for(instance_index=0;instance_index<testing_instance_number;instance_index++)
	{
		int i;
		double target_label, predict_label;

		target_label = ptr_label[instance_index];

		if(mxIsSparse(prhs[1]) && model->param.kernel_type != PRECOMPUTED) // prhs[1]^T is still sparse
			read_sparse_instance(pplhs[0], instance_index, x);
		else
		{
			// Dense case: gather one row of the column-major MATLAB matrix.
			for(i=0;i<feature_number;i++)
			{
				x[i].index = i+1;
				x[i].value = ptr_instance[testing_instance_number*i+instance_index];
			}
			x[feature_number].index = -1;
		}

		if(predict_probability)
		{
			if(svm_type==C_SVC || svm_type==NU_SVC)
			{
				predict_label = svm_predict_probability(model, x, prob_estimates);
				ptr_predict_label[instance_index] = predict_label;
				// Store per-class probabilities column-major (instance-major).
				for(i=0;i<nr_class;i++)
					ptr_prob_estimates[instance_index + i * testing_instance_number] = prob_estimates[i];
			} else {
				predict_label = svm_predict(model,x);
				ptr_predict_label[instance_index] = predict_label;
			}
		}
		else
		{
			if(svm_type == ONE_CLASS ||
			   svm_type == EPSILON_SVR ||
			   svm_type == NU_SVR)
			{
				// Single decision value per instance for these model types.
				double res;
				predict_label = svm_predict_values(model, x, &res);
				ptr_dec_values[instance_index] = res;
			}
			else
			{
				// One decision value per pairwise classifier.
				double *dec_values = (double *) malloc(sizeof(double) * nr_class*(nr_class-1)/2);
				predict_label = svm_predict_values(model, x, dec_values);
				if(nr_class == 1) 
					ptr_dec_values[instance_index] = 1;
				else
					for(i=0;i<(nr_class*(nr_class-1))/2;i++)
						ptr_dec_values[instance_index + i * testing_instance_number] = dec_values[i];
				free(dec_values);
			}
			ptr_predict_label[instance_index] = predict_label;
		}

		// Accumulate sums used for accuracy (classification) and for
		// MSE / squared correlation coefficient (regression).
		if(predict_label == target_label)
			++correct;
		error += (predict_label-target_label)*(predict_label-target_label);
		sump += predict_label;
		sumt += target_label;
		sumpp += predict_label*predict_label;
		sumtt += target_label*target_label;
		sumpt += predict_label*target_label;
		++total;
	}
	if(svm_type==NU_SVR || svm_type==EPSILON_SVR)
	{
		info("Mean squared error = %g (regression)\n",error/total);
		info("Squared correlation coefficient = %g (regression)\n",
			((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
			((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt))
			);
	}
	else
		info("Accuracy = %g%% (%d/%d) (classification)\n",
			(double)correct/total*100,correct,total);

	// return accuracy, mean squared error, squared correlation coefficient
	tplhs[1] = mxCreateDoubleMatrix(3, 1, mxREAL);
	ptr = mxGetPr(tplhs[1]);
	ptr[0] = (double)correct/total*100;
	ptr[1] = error/total;
	ptr[2] = ((total*sumpt-sump*sumt)*(total*sumpt-sump*sumt))/
				((total*sumpp-sump*sump)*(total*sumtt-sumt*sumt));

	free(x);
	if(prob_estimates != NULL)
		free(prob_estimates);

	// Copy out only the outputs the caller asked for. The fallthrough from
	// case 3 to case 1/0 is intentional: plhs[0] is always assigned.
	switch(nlhs)
	{
		case 3:
			plhs[2] = tplhs[2];
			plhs[1] = tplhs[1];
		case 1:
		case 0:
			plhs[0] = tplhs[0];
	}
}

// Print the svmpredict usage/help message to the MATLAB console.
void exit_with_help()
{
	static const char *usage =
		"Usage: [predicted_label, accuracy, decision_values/prob_estimates] = svmpredict(testing_label_vector, testing_instance_matrix, model, 'libsvm_options')\n"
		"       [predicted_label] = svmpredict(testing_label_vector, testing_instance_matrix, model, 'libsvm_options')\n"
		"Parameters:\n"
		"  model: SVM model structure from svmtrain.\n"
		"  libsvm_options:\n"
		"    -b probability_estimates: whether to predict probability estimates, 0 or 1 (default 0); one-class SVM not supported yet\n"
		"    -q : quiet mode (no outputs)\n"
		"Returns:\n"
		"  predicted_label: SVM prediction output vector.\n"
		"  accuracy: a vector with accuracy, mean squared error, squared correlation coefficient.\n"
		"  prob_estimates: If selected, probability estimate vector.\n";
	mexPrintf("%s", usage);
}

// MATLAB entry point:
//   [predicted_label, accuracy, dec_values/prob_estimates] =
//       svmpredict(label_vector, instance_matrix, model [, 'libsvm_options'])
// Validates argument counts/types, parses the option string (-b, -q),
// rebuilds the libsvm model from the MATLAB struct, and delegates the
// actual work to predict(). On any error, all outputs are filled with
// empty matrices via fake_answer().
void mexFunction( int nlhs, mxArray *plhs[],
		 int nrhs, const mxArray *prhs[] )
{
	int prob_estimate_flag = 0;
	struct svm_model *model;
	info = &mexPrintf;

	// Accept 0, 1 or 3 outputs and 3 or 4 inputs; anything else is usage error.
	if(nlhs == 2 || nlhs > 3 || nrhs > 4 || nrhs < 3)
	{
		exit_with_help();
		fake_answer(nlhs, plhs);
		return;
	}

	if(!mxIsDouble(prhs[0]) || !mxIsDouble(prhs[1])) {
		mexPrintf("Error: label vector and instance matrix must be double\n");
		fake_answer(nlhs, plhs);
		return;
	}

	if(mxIsStruct(prhs[2]))
	{
		const char *error_msg;

		// parse options
		if(nrhs==4)
		{
			int i, argc = 1;
			char cmd[CMD_LEN], *argv[CMD_LEN/2];

			// put options in argv[]
			// Tokenize the option string on spaces; argv[0] is unused to
			// mimic a conventional argv layout.
			mxGetString(prhs[3], cmd,  mxGetN(prhs[3]) + 1);
			if((argv[argc] = strtok(cmd, " ")) != NULL)
				while((argv[++argc] = strtok(NULL, " ")) != NULL)
					;

			for(i=1;i<argc;i++)
			{
				if(argv[i][0] != '-') break;
				// Every option except -q requires a following value token.
				if((++i>=argc) && argv[i-1][1] != 'q')
				{
					exit_with_help();
					fake_answer(nlhs, plhs);
					return;
				}
				switch(argv[i-1][1])
				{
					case 'b':
						prob_estimate_flag = atoi(argv[i]);
						break;
					case 'q':
						// -q takes no value: undo the ++i consumed above
						// and route diagnostics to the null sink.
						i--;
						info = &print_null;
						break;
					default:
						mexPrintf("Unknown option: -%c\n", argv[i-1][1]);
						exit_with_help();
						fake_answer(nlhs, plhs);
						return;
				}
			}
		}

		// Convert the MATLAB model struct back into a libsvm svm_model.
		model = matlab_matrix_to_model(prhs[2], &error_msg);
		if (model == NULL)
		{
			mexPrintf("Error: can't read model: %s\n", error_msg);
			fake_answer(nlhs, plhs);
			return;
		}

		if(prob_estimate_flag)
		{
			// -b 1 requires a model trained with probability estimates.
			if(svm_check_probability_model(model)==0)
			{
				mexPrintf("Model does not support probabiliy estimates\n");
				fake_answer(nlhs, plhs);
				svm_free_and_destroy_model(&model);
				return;
			}
		}
		else
		{
			if(svm_check_probability_model(model)!=0)
				info("Model supports probability estimates, but disabled in predicton.\n");
		}

		predict(nlhs, plhs, prhs, model, prob_estimate_flag);
		// destroy model
		svm_free_and_destroy_model(&model);
	}
	else
	{
		mexPrintf("model file should be a struct array\n");
		fake_answer(nlhs, plhs);
	}

	return;
}
0 Kudos
Scholar u4223374
Scholar
2,713 Views
Registered: ‎04-26-2015

Re: error: expected identifier before '-' token

This is not going to work well. In particular, Matlab has used malloc (not supported by HLS because the FPGA hardware has no way of doing dynamic memory allocation) and double-precision floating-point (slow and resource-intensive). I suspect HLS will also be upset about Matlab's pointer casting, which doesn't make much sense on an FPGA. I suspect a reasonably significant rewrite will be essential just to make it synthesize, and a complete redesign may be required to get any reasonable efficiency/performance from it.

0 Kudos
Participant aichamb
Participant
2,710 Views
Registered: ‎10-03-2017

Re: error: expected identifier before '-' token

Thank you @u4223374. I have one more question: how can I write this data struct "model" in C code?

field1 == "Parameters";   value1 = [1;1;1;10;0.150000000000000]
field2 == "nr_class" ;    value2 = [3]
field3 == "totalSV"  ;    value3 = [38]
field4 == "rho"       ;   value4 = [3.85250072056665;1.98260813354295;-9.85381777928213]
field5 == "Label"      ;  value5 = [1;2;3]
field6 == "sv_indices" ;  value6 = [1;3;11;18;20;22;25;26;29;30;42;43;45;49;50;51;55;56;60;63;65;69;70;71;72;73;75;82;83;90;94;98;102;104;107;110;113;119]
field7 == "ProbA"      ; value7 = [-3.04020468884611;-3.02985537320699;-1.22838128162279]
field8 == "ProbB"      ; value8 = [-0.143832034298135;-0.331592706939798;-0.317353460209991]
field9 == "nSV"         :value9 = [10;17;11]
field10 == "sv_coef"    :value10 = [0.0441014900184046,0;0.115481676752995,0.0247201450973238;0.0284525394158882,0;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0.0429276473187021,0.0247201450973238;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0.115481676752995,0.0247201450973238;0,2.56566567302770;-0.115481676752995,0;-0.115481676752995,0;-0.115481676752995,0;-0.115481676752995,0;0,2.56566567302770;0,1.19304447300938;-0.115481676752995,0;0,1.37262120001832;0,2.56566567302770;0,2.56566567302770;-0.115481676752995,0;0,2.56566567302770;0,2.56566567302770;0,2.56566567302770;-0.115481676752995,0;-0.115481676752995,0;-0.0247201450973238,-2.56566567302770;-0.0247201450973238,-0.665006705657444;-0.0247201450973238,-2.56566567302770;0,-1.82845371567548;-0.0247201450973238,-2.56566567302770;0,-2.56566567302770;-0.0247201450973238,-2.56566567302770;-0.0247201450973238,-0.106867010440775;0,-1.24158009628657;-0.0247201450973238,-2.56566567302770;-0.0247201450973238,-1.28942381799513]
model1 = struct(field1,value1,field2,value2,field3,value3,field4,value4,field5,value5,field6,value6,field7,value7,field8,value8,field9,value9,field10,value10)


 [predict_label1,precision1,prob_estimates_t1]=svmpredict(LabelS1,SStest1, model1);
0 Kudos
Scholar u4223374
Scholar
2,693 Views
Registered: ‎04-26-2015

Re: error: expected identifier before '-' token

You'll have to explain to me what it's trying to do, because right now I've got no idea what the purpose of that code is.

0 Kudos
Participant aichamb
Participant
2,686 Views
Registered: ‎10-03-2017

Re: error: expected identifier before '-' token

I want to create a data struct named "model1" which contains 10 variables called (field1, ..., field10), initialised from (value1, ..., value10).

each variables has (field,value)

then model1 will be an input for this function 

[predict_label1,precision1,prob_estimates_t1] = svmpredict(LabelS',SStest', model1);

 

 

 

0 Kudos
Participant aichamb
Participant
2,674 Views
Registered: ‎10-03-2017

Re: error: expected identifier before '-' token

Hi @u4223374, this is what I'm talking about. I have written this structure now — is it correct or not? If not, can you explain?

// NOTE(review): the original paste used MATLAB syntax ([...] matrix literals,
// struct(...) construction, and [a,b,c] = f(...) multi-returns) inside a C++
// file, which is what produces "error: expected identifier before '-' token".
// The rewrite below expresses the same data as valid C++ static const arrays.
// The one piece that cannot be reconstructed here is svmpredict() itself: it
// is a MATLAB/libsvm MEX function with no C++ equivalent in this file (see
// the TODO at the bottom).
template <typename T, int DIM1, int DIM2, int DIMa, int DIMb, int DIMc, int DIMd, int DIMe, int DIMf>
void pavilion_hw(T LabelS1[DIM1][DIM2], T predict_label1[DIMa][DIMb], T precision1[DIMc][DIMd], T prob_estimates_t1[DIMe][DIMf])
{
	// Test feature matrix (60 samples x 14 features), copied verbatim from the
	// MATLAB matrix literal in the original post.  'static const' keeps it out
	// of the stack / BRAM write path and lets HLS treat it as a ROM.
	static const double SStest1[60][14] = {
		{ -0.674090537952796, -0.967913560624215, -0.946607312926616, -0.949789832193271, -0.955656497278097, -0.938898706383114, -0.961972723505885, -0.943158564395126, -0.924258334652885, -0.987130021868515, -0.961224433510270, -0.979509038935985, -0.982841580894078, -0.968786461033008 },
		{ -0.661420074510186, -0.953582957005305, -0.976667803306075, -0.852024795662031, -0.923684241437988, -0.908602156779947, -0.961480830721390, -0.943819721336063, -0.961645084746726, -0.975901524455997, -0.964617784738062, -0.985199969033792, -0.979709442173101, -0.949066533256159 },
		{ -0.796228139367668, -0.761729500498894, -0.922254078572498, -0.835053436355116, -0.927209479313798, -0.955333905733235, -0.924794123637238, -0.959289492589635, -0.930944373099572, -0.938536988120820, -0.976986907828858, -0.949164123093526, -0.970737156105474, -0.947698751805312 },
		{ -0.760572782759851, -0.831771980500142, -0.945585944378708, -0.908125723416015, -0.942297850129234, -0.938465946137737, -0.968103420998903, -0.977193895073700, -0.969825462569338, -0.981781833938995, -0.975225320029628, -0.966562487844290, -0.979385622636404, -0.969916779098183 },
		{ -0.682062208785592, -0.901993333331962, -0.911439298246093, -0.995583382410982, -0.950515197199961, -0.939916724225237, -0.951278160425057, -0.951056228844938, -0.962461853366129, -0.977169060764356, -0.975068659001004, -1, -0.981383502203534, -0.960140644534363 },
		{ -0.764153524047345, -0.782068868778498, -0.930585958249904, -0.899522666144618, -0.961023627518624, -0.905380199817116, -0.931424494746754, -0.911851616413864, -0.955101420749475, -0.978391738284877, -0.977733347990399, -0.957980914548860, -0.987159688960536, -0.958296510208874 },
		{ -0.741980614232449, -0.708911988880729, -0.854515153854522, -0.844481728371788, -0.939444864580808, -0.921581100661191, -0.931215822177604, -0.924511727085001, -0.934842760732822, -0.944799003697996, -0.949046456708687, -0.969530645790348, -0.981557594662755, -0.966214670867448 },
		{ -0.373792542593349, -0.520278392032748, -0.703829741405612, -0.811632538345339, -0.843860131190612, -0.867918184900834, -0.861425053147186, -0.921678622630629, -0.848207919776101, -0.952149873411277, -0.933584801186523, -0.956270474264593, -0.972592914683167, -0.953500986248213 },
		{ -0.661229877813482, -0.663476191336492, -0.875946105955396, -0.813599831284167, -0.927616269612180, -0.947770900356774, -0.944605151964964, -0.949965011099178, -0.951558296918110, -0.959706992151603, -0.934147669647518, -0.959913517244191, -0.976789712492956, -0.957643754265247 },
		{ -0.518554977026420, -0.602825724193184, -0.883181911007378, -0.866976656576412, -0.881653303448664, -0.834429119509439, -0.938506509068625, -0.792206968065827, -0.884123330616924, -0.917397025318987, -0.938798625979835, -0.925986787255497, -0.986438030341222, -0.961106108508390 },
		{ -0.530140301878371, -0.883934580851869, -0.929208213377834, -0.981337861458516, -0.942509396789582, -0.918370801892608, -0.946368904151541, -0.968727717519249, -0.965415345904235, -0.992420840751352, -0.960290441628204, -0.987263434783806, -0.980777852550863, -0.961027269770681 },
		{ -0.614065935999483, -0.643092868497660, -0.849915771303324, -0.923684372369931, -0.899325977629591, -0.940079152274114, -0.943645330258078, -0.966640084204524, -0.954120749867146, -0.992492691029133, -0.942907389517172, -0.986160598733704, -0.961571311375914, -0.944437607583134 },
		{ -0.586207974039630, -0.655562550483140, -0.848822046603218, -0.828032863687348, -0.979545766504300, -0.954849783433112, -0.933180508531547, -0.901952187183437, -0.935767611336519, -0.947717892133932, -0.963983017763248, -0.936129485635805, -0.962853677964312, -0.939892729210301 },
		{ -0.698562423991206, -0.828928876308795, -0.930529603172751, -0.973583105511949, -0.922780081115147, -0.950411689148607, -0.922089545604029, -0.902482494918462, -0.919077025971580, -0.938762961947185, -0.946683851734027, -0.943899220923818, -0.970751027958274, -0.962890874229321 },
		{ -0.792352413710960, -0.846807561570503, -0.962463796461496, -0.938280210895608, -0.970676035685350, -0.950577966455638, -0.982653505409229, -0.964264032678215, -0.961530079570332, -0.984175640092749, -0.990259222509500, -0.957261271696928, -0.982531823910805, -0.975962025172037 },
		{ -0.794841618144870, -0.829566960381545, -0.965210501288073, -0.932270663074643, -0.967556626906079, -0.946620792181095, -0.972488769216231, -0.962127134647906, -0.957393614634659, -0.981727556097064, -0.987503639619946, -0.953217441593240, -0.981375073603447, -0.973405792323495 },
		{ -0.795020194095386, -0.806888733270744, -0.960369889000445, -0.915515767489692, -0.957828103336507, -0.939683270081732, -0.951942785566158, -0.957422184175696, -0.947345455495148, -0.975149131137862, -0.978221174145014, -0.944545398095925, -0.978853699190608, -0.968099161607423 },
		{ -0.698839070385112, -0.854584769884228, -0.879448228263050, -0.898303458884004, -0.970834545509510, -0.997729382049760, -0.938912886553759, -0.968614408138659, -0.974484461898617, -0.985571704652593, -0.967328388395865, -0.972452715835938, -0.983660906019555, -0.963564537278949 },
		{ -0.673302416738159, -0.879272351929407, -0.968223106643735, -0.972084171937861, -0.959896101364386, -0.961700573227355, -0.969790464510721, -0.954916739185306, -0.945985863949909, -0.975300691304921, -0.968364403832397, -0.977561535571141, -0.980589365529592, -0.954447517556441 },
		{ -0.637045925923046, -0.710061083800907, -0.936826613792305, -0.859638718802183, -0.912322897550303, -0.981683944934587, -0.890113781116241, -0.913234535285272, -0.894395484221828, -0.922010414470385, -0.957815677576098, -0.941998783974119, -0.969077948566397, -0.955456380822368 },
		{ -0.0464553899974760, -0.662925986991806, -0.833828153044336, -0.929089542429105, -0.837654390767010, -0.945247889228093, -0.957871446601941, -0.960270138903518, -0.965707812000886, -0.958450094695355, -0.968893174805499, -0.975892896458684, -0.882317284020699, -0.784076824928733 },
		{ 0.168450404584060, -0.438611610872580, -0.739305838353202, -0.753469051869701, -0.872053346050208, -0.902328666401733, -0.875296732403390, -0.891944157498292, -0.875131611862177, -0.876570070893988, -0.916492079359415, -0.892474870559755, -0.880173712101710, -0.818840278283343 },
		{ 0.214956796965889, -0.463100108992908, -0.795731268208176, -0.892232485332773, -0.796178277611433, -0.876064110052591, -0.917887824420658, -0.933514902536318, -0.906487752914575, -0.906161558178618, -0.963182144556198, -0.927398731670949, -0.880132272769419, -0.776628206791729 },
		{ -0.0355148259693545, -0.103832635363957, -0.642826483479963, -0.675170193621003, -0.726663088806811, -0.743780040787059, -0.831794948409108, -0.812381272649148, -0.818233308995815, -0.936764748241986, -0.927791786953338, -0.946337910236447, -0.865122830148352, -0.803850525738294 },
		{ -0.0375370422771995, -0.714446611429583, -0.817139932455997, -0.861863433965497, -0.801308409717709, -0.912671943149429, -0.902056680948667, -0.924358383893747, -0.930365403512554, -0.933505439717915, -0.907344668404472, -0.962516466348439, -0.888667439601752, -0.759379503762919 },
		{ -0.0996422526625035, -0.569498829531724, -0.785394421709639, -0.775105653598853, -0.896482238814758, -0.792471179311656, -0.893821172610422, -0.919322542815255, -0.929090887568931, -0.947222968505341, -0.941521326987315, -0.939564066437628, -0.886581473928985, -0.854622228969523 },
		{ -0.157947246037774, -0.746922906626790, -0.755491702134737, -0.884559880628624, -0.926822302167037, -0.892694279049245, -0.880997501268835, -0.960323416980551, -0.931254655744972, -0.929470986910981, -0.910748517978109, -0.956965233219925, -0.892806218692971, -0.824323609583577 },
		{ 0.0143100845225228, -0.296971046115063, -0.756037633627588, -0.763254487870567, -0.853796813421072, -0.903855283273275, -0.932880202365020, -0.920300138069262, -0.937750780107648, -0.931802164909103, -0.938127043278968, -0.936648786420969, -0.897396787074019, -0.841443684643823 },
		{ 0.126406089554415, -0.553436242221630, -0.878529200298623, -0.811481080953497, -0.858597903016569, -0.858187238247150, -0.929425420879273, -0.898856593831970, -0.954706925783214, -0.905919892252836, -0.955668439276137, -0.938058984422267, -0.859062344153360, -0.781233364834173 },
		{ -0.126055537585803, -0.545552869682677, -0.731986017810449, -0.886554196997008, -0.843484118770908, -0.933727925913333, -0.929494256217181, -0.949857908445841, -0.933022166519305, -0.955371701911967, -0.945336532486260, -0.958137376491658, -0.907276661798547, -0.823729901317309 },
		{ 0.0145717851732077, -0.533090337648005, -0.839692084485777, -0.895485971838661, -0.928205663722173, -0.925860301792440, -0.929578060336248, -0.960552985114785, -0.949903531077579, -0.934632681448160, -0.968632836944857, -0.950189964823872, -0.879233119136866, -0.785002723370355 },
		{ -0.178891784381271, -0.399532717894635, -0.669385253177598, -0.727133395461031, -0.751773861739512, -0.852940528963974, -0.888222736607932, -0.865518628517857, -0.904806207358640, -0.896746617989777, -0.933151592845165, -0.903824933251927, -0.892900090292400, -0.850301284686282 },
		{ 0.339342751347667, -0.168658832220281, -0.701126807517342, -0.705317306144173, -0.766727960954685, -0.805058424745359, -0.908411767678567, -0.850936664511331, -0.938429790986132, -0.897160305989233, -0.966173046779846, -0.901648919962718, -0.923592942739536, -0.812147836977765 },
		{ -0.165273390468733, -0.202594882499135, -0.685271591575949, -0.641279617027603, -0.810903427104329, -0.721277680480091, -0.878931073742658, -0.770692553521093, -0.915436309034622, -0.845750173546187, -0.920759281404426, -0.849039794164389, -0.883592352877391, -0.786060658867459 },
		{ -0.174398260956424, -0.628964902893240, -0.851345814848046, -0.929633727074964, -0.912288496191548, -0.952393561588859, -0.957556620703674, -0.966293462428000, -0.967979236795410, -0.967761734377952, -0.968514139800528, -0.966000184471383, -0.873900925904829, -0.804602844673793 },
		{ 0.0912987881509080, -0.562348503567656, -0.544977227135293, -0.496953610164791, -0.665280698891481, -0.825516237438271, -0.764714736496212, -0.825266477368957, -0.778124049781793, -0.889578890948835, -0.860510945808230, -0.889449590530369, -0.899510852733607, -0.817813916256618 },
		{ -0.159278240258134, -0.562350349373472, -0.672010009362750, -0.702971077603283, -0.791579089506330, -0.773580188614902, -0.868797839368800, -0.901404564649279, -0.883359902797464, -0.882538284811406, -0.902504051837194, -0.888858485690590, -0.907802404057984, -0.860390254926969 },
		{ -0.115571783442574, -0.364989767126751, -0.715693202021139, -0.720794054845607, -0.734368283253484, -0.821024371768553, -0.864273592837283, -0.884637763632675, -0.905128610959545, -0.911052662127927, -0.934668657036883, -0.959498348008793, -0.948659165025090, -0.857302848890063 },
		{ -0.119022287702128, -0.435263305220441, -0.674278713268148, -0.717147325691312, -0.786355896498330, -0.803730904718690, -0.918533634832367, -0.871846660926452, -0.930289987345790, -0.945177344298286, -0.948345725996483, -0.900852637961356, -0.927660822404464, -0.829129813555718 },
		{ -0.243582392888872, -0.456156540815826, -0.882025256314901, -0.878826799838538, -0.875702181196543, -0.952037299706992, -0.922147237385639, -0.953195599137228, -0.988774455168753, -0.956906724025646, -0.990803722658199, -0.962527534612586, -0.917165742920258, -0.857395436225837 },
		{ 0.399888144165083, 0.187596602167405, -0.571206951482556, -0.630021295932159, -0.732918080832751, -0.782042578842550, -0.884501484522094, -0.862443525574920, -0.907782413800503, -0.879582251780713, -0.875536974905776, -0.912451877893607, -0.922455195733535, -0.902359364047133 },
		{ 0.556284386426115, 0.314036089555063, -0.519960698533138, -0.646049325780951, -0.741233326065650, -0.712137290320001, -0.795024298435714, -0.727879833200862, -0.853933696711804, -0.789936654005493, -0.895739890390449, -0.834987182855880, -0.931084212245965, -0.785223030050557 },
		{ 0.144327924856347, -0.00175945025183155, -0.764123925530958, -0.788116589181975, -0.815023410153421, -0.765757055388867, -0.819440340310641, -0.832834974075190, -0.889065353427949, -0.879626121154264, -0.955003504819721, -0.886941582940570, -0.951457072822665, -0.838292308039416 },
		{ 0.423517043395152, 0.207103229391809, -0.537337037899986, -0.719082296323729, -0.730712289172267, -0.803970667494681, -0.776968241650566, -0.770243671002982, -0.870732435883177, -0.790184354464107, -0.849528556769068, -0.857417938148147, -0.914235935637580, -0.862839599386109 },
		{ 0.673400833910405, 0.263687282928186, -0.468684958192597, -0.555004829882917, -0.597759466034923, -0.879943048249649, -0.759647494885421, -0.842498114781129, -0.795476315407791, -0.874613706944541, -0.825569610343889, -0.906032284945689, -0.959418030250979, -0.845363247476606 },
		{ 0.183271855620944, 0.0211381514220783, -0.502683851137181, -0.636454841163429, -0.786437592022666, -0.805282205840683, -0.813868765137396, -0.807502478689115, -0.789136059855138, -0.787535977735564, -0.823588573107538, -0.833053461976117, -0.925414212506851, -0.879916140090418 },
		{ 0.295845138886807, -0.00395006804240183, -0.548896710154673, -0.673532260911150, -0.743809959983556, -0.766709087110313, -0.792579121413211, -0.812789970935862, -0.888809512385073, -0.865512371317576, -0.866483384552372, -0.910365914309237, -0.922859275559095, -0.805741791608307 },
		{ 0.382230322025496, -0.100544388585754, -0.288045792305930, -0.462878731933967, -0.733635653289924, -0.810774775566528, -0.784311635942323, -0.824278283823241, -0.837674642235770, -0.820294633519681, -0.930289366057359, -0.880333683094064, -0.914809932792345, -0.733615286641321 },
		{ 0.515086084868621, -0.0118463200641755, -0.681312282108688, -0.721728766135035, -0.896943193457623, -0.892492731672196, -0.900895034581712, -0.958202884169137, -0.890317876836114, -0.925894485203484, -0.887102514104963, -0.925150645540907, -0.951009285936294, -0.828633953324099 },
		{ 0.218939003616403, -0.219908089866804, -0.554543937993146, -0.697744593495449, -0.817644527591940, -0.815044931936388, -0.835611927342840, -0.863014952910201, -0.870015506961933, -0.924498109016286, -0.861738324469650, -0.888798266919434, -0.963003761418040, -0.752728018190114 },
		{ 0.379801716860667, 0.0803934635748880, -0.348489273860588, -0.443364836987716, -0.686210071710231, -0.749012364568597, -0.868160795860281, -0.905931730794798, -0.827560530714372, -0.843130513494941, -0.874882850617447, -0.898079375435337, -0.958923408320119, -0.826200545619004 },
		{ 0.499909795739691, 0.0624123264063490, -0.257162655088657, -0.474302401472459, -0.717398057495245, -0.792006768091720, -0.765814288840469, -0.795160131332406, -0.817978992797693, -0.871336925999567, -0.907076913192798, -0.851666096922091, -0.932313438565331, -0.778005802680850 },
		{ 0.269368264824844, 0.0215786140194887, -0.476977706904142, -0.647680815543785, -0.700462589073132, -0.788128770460292, -0.835354852471012, -0.862240914525829, -0.860676230010876, -0.845117177334758, -0.912670838989494, -0.867405287746421, -0.973060275185485, -0.823505423592852 },
		{ 0.179006934566610, 0.0585770863310706, -0.729516278647376, -0.736020711155540, -0.833340421570685, -0.863888272177564, -0.858242670121925, -0.908051906398737, -0.888570941463165, -0.900872325708206, -0.872080362074863, -0.886372519170446, -0.948853071328778, -0.841226487206630 },
		{ 0.255490817526987, 0.0983271005342115, -0.506833291130341, -0.651349940443935, -0.756337911242788, -0.786870364249337, -0.799083430523079, -0.798476951216588, -0.930360503515967, -0.961047836742245, -0.950977568316625, -0.966198529200948, -0.944896563690946, -0.877086968426192 },
		{ 0.244696633497264, 0.124742610239186, -0.00974949294211636, -0.527185835802341, -0.669863996792797, -0.660506846176490, -0.777882938739871, -0.741441779984189, -0.863181488070075, -0.798643774891844, -0.919225101825890, -0.838533769615936, -0.900125302373132, -0.798067298811813 },
		{ 0.154383004639343, -0.135587455441821, -0.474282157787074, -0.545456066636011, -0.647593724862076, -0.755085065020264, -0.761876996351167, -0.835705857964857, -0.887714636576169, -0.869061999970453, -0.882416185426200, -0.923607732584544, -0.959373168720827, -0.838276040824778 },
		{ 0.504849989457597, 0.214254376963237, -0.486090671351638, -0.601583503221537, -0.696568826469689, -0.752801035520086, -0.811002254524322, -0.825807713677726, -0.833769846610573, -0.855476282988971, -0.883160491267595, -0.962853439329057, -0.902581371883451, -0.743974014417838 },
		{ 0.445145614467157, -0.0889240542286259, -0.251341749130795, -0.325574329282160, -0.661040242281816, -0.652164091731617, -0.813620922276726, -0.807485140580725, -0.856971911757818, -0.756592971519269, -0.864973897574650, -0.840655456848447, -0.920416268675231, -0.746278678613040 },
		{ 0.434793037813971, -0.0492388032158161, -0.365397844714972, -0.528993051702485, -0.685064834479619, -0.737413635953856, -0.799290876511762, -0.850728865215625, -0.825002878399477, -0.891118614677671, -0.870372516784281, -0.902812350877063, -0.922480285663246, -0.885180878798811 }
	};

	// ---- SVM model constants ported from the MATLAB struct 'model1' ----
	// NOTE(review): the 5 entries mirror the MATLAB 'parameters' vector;
	// their meaning follows libsvm's parameter ordering (svm_type,
	// kernel_type, degree, gamma, coef0) -- confirm against the MATLAB model.
	static const double kParameters[5] = { 1, 1, 1, 10, 0.150000000000000 };
	static const int kNrClass = 3;   // number of classes (k); 2 in regression/one-class SVM
	static const int kTotalSV = 38;  // total number of support vectors (l)
	// Coefficients for SVs in the decision functions (sv_coef[k-1][l]).
	static const double kSvCoef[38][2] = {
		{ 0.0441014900184046, 0 }, { 0.115481676752995, 0.0247201450973238 },
		{ 0.0284525394158882, 0 }, { 0.115481676752995, 0.0247201450973238 },
		{ 0.115481676752995, 0.0247201450973238 }, { 0.115481676752995, 0.0247201450973238 },
		{ 0.0429276473187021, 0.0247201450973238 }, { 0.115481676752995, 0.0247201450973238 },
		{ 0.115481676752995, 0.0247201450973238 }, { 0.115481676752995, 0.0247201450973238 },
		{ 0, 2.56566567302770 }, { -0.115481676752995, 0 },
		{ -0.115481676752995, 0 }, { -0.115481676752995, 0 },
		{ -0.115481676752995, 0 }, { 0, 2.56566567302770 },
		{ 0, 1.19304447300938 }, { -0.115481676752995, 0 },
		{ 0, 1.37262120001832 }, { 0, 2.56566567302770 },
		{ 0, 2.56566567302770 }, { -0.115481676752995, 0 },
		{ 0, 2.56566567302770 }, { 0, 2.56566567302770 },
		{ 0, 2.56566567302770 }, { -0.115481676752995, 0 },
		{ -0.115481676752995, 0 }, { -0.0247201450973238, -2.56566567302770 },
		{ -0.0247201450973238, -0.665006705657444 }, { -0.0247201450973238, -2.56566567302770 },
		{ 0, -1.82845371567548 }, { -0.0247201450973238, -2.56566567302770 },
		{ 0, -2.56566567302770 }, { -0.0247201450973238, -2.56566567302770 },
		{ -0.0247201450973238, -0.106867010440775 }, { 0, -1.24158009628657 },
		{ -0.0247201450973238, -2.56566567302770 }, { -0.0247201450973238, -1.28942381799513 }
	};
	// Constants in the decision functions (rho[k*(k-1)/2]).
	static const double kRho[3] = { 3.85250072056665, 1.98260813354295, -9.85381777928213 };
	// Pairwise probability information.
	static const double kProbA[3] = { -3.04020468884611, -3.02985537320699, -1.22838128162279 };
	static const double kProbB[3] = { -0.143832034298135, -0.331592706939798, -0.317353460209991 };
	// sv_indices[i] is the 1-based position of SV i in the training set.
	static const int kSvIndices[38] = {
		1, 3, 11, 18, 20, 22, 25, 26, 29, 30, 42, 43, 45, 49, 50, 51, 55, 56, 60,
		63, 65, 69, 70, 71, 72, 73, 75, 82, 83, 90, 94, 98, 102, 104, 107, 110, 113, 119
	};
	static const int kLabel[3] = { 1, 2, 3 };    // label of each class (label[k])
	static const int kNSV[3]   = { 10, 17, 11 }; // SVs per class; sums to kTotalSV

	// TODO(review): svmpredict() is a MATLAB/libsvm MEX entry point; there is
	// no C++ equivalent in this file.  Port libsvm's svm_predict_probability()
	// (svm.cpp/svm.h) and call it here to fill predict_label1 / precision1 /
	// prob_estimates_t1 from SStest1 and the model arrays above.  Note that the
	// support vectors themselves (MATLAB field 'SVs') are NOT present in this
	// snippet and must also be brought across before prediction can work.
	(void)LabelS1; (void)predict_label1; (void)precision1; (void)prob_estimates_t1;
	(void)SStest1; (void)kParameters; (void)kNrClass; (void)kTotalSV;
	(void)kSvCoef; (void)kRho; (void)kProbA; (void)kProbB;
	(void)kSvIndices; (void)kLabel; (void)kNSV;
}
0 Kudos