<?xml version="1.0" encoding="UTF-8"?>
    <Dialog file="">
        <Body>
            <Topics/>
            <Turn nickname="(#user1#)" genid="5">
                <Utterance genid="6" ref="-1" time="12:19:31" date="09/12/2007" oldid="5" color="" topic="">Hello</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="7">
                <Utterance genid="8" ref="-1" time="12:20:13" date="09/12/2007" oldid="6" color="" topic="">hi</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="10">
                <Utterance genid="11" ref="-1" time="12:21:19" date="09/12/2007" oldid="8" color="" topic="">so, we have two Razvan?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="12">
                <Utterance genid="13" ref="-1" time="12:21:44" date="09/12/2007" oldid="9" color="" topic="">Razvan is bogus; first time the panel didn't loaded properly</Utterance>
                <Utterance genid="14" ref="-1" time="12:21:59" date="09/12/2007" oldid="10" color="" topic="">and I had to close the application</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="15">
                <Utterance genid="16" ref="-1" time="12:22:05" date="09/12/2007" oldid="11" color="" topic="">ok</Utterance>
                <Utterance genid="17" ref="-1" time="12:22:14" date="09/12/2007" oldid="12" color="" topic="">shall we start?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="18">
                <Utterance genid="19" ref="-1" time="12:22:30" date="09/12/2007" oldid="13" color="" topic="">so I'm with the NAIVE BAYES MODEL</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="20">
                <Utterance genid="21" ref="-1" time="12:22:46" date="09/12/2007" oldid="14" color="" topic="">I am responsible with Maximum Entropy</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="22">
                <Utterance genid="23" ref="-1" time="12:23:04" date="09/12/2007" oldid="15" color="" topic="">I have Hidden Markov Model</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="24">
                <Utterance genid="25" ref="-1" time="12:23:34" date="09/12/2007" oldid="16" color="" topic="">and i will talk about SVM (support vecror machine)</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="27">
                <Utterance genid="28" ref="-1" time="12:23:54" date="09/12/2007" oldid="18" color="" topic="">ok, who wants to start?</Utterance>
                <Utterance genid="29" ref="-1" time="12:24:04" date="09/12/2007" oldid="19" color="" topic="">ladies?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="30">
                <Utterance genid="31" ref="29" time="12:24:29" date="09/12/2007" oldid="20" color="" topic="">I agree: ladies first!</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="32">
                <Utterance genid="33" ref="-1" time="12:26:17" date="09/12/2007" oldid="21" color="" topic="">do you want me to start first?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="34">
                <Utterance genid="35" ref="-1" time="12:26:35" date="09/12/2007" oldid="22" color="" topic="">go ahead</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="36">
                <Utterance genid="37" ref="-1" time="12:28:39" date="09/12/2007" oldid="23" color="" topic="">Ok, I see no one is brave enough to start</Utterance>
                <Utterance genid="38" ref="-1" time="12:29:53" date="09/12/2007" oldid="24" color="" topic="">Naive Bayes Model is based on simple probabilistic classfiers that use Bayes' theorem with strong (naive) independence assumption.</Utterance>
                <Utterance genid="39" ref="-1" time="12:30:19" date="09/12/2007" oldid="25" color="" topic="">Something like "independent feature model".</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="40">
                <Utterance genid="41" ref="-1" time="12:31:06" date="09/12/2007" oldid="26" color="" topic="">aha</Utterance>
                <Utterance genid="42" ref="-1" time="12:31:09" date="09/12/2007" oldid="27" color="" topic="">go ahead</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="43">
                <Utterance genid="44" ref="-1" time="12:31:23" date="09/12/2007" oldid="28" color="" topic="">The probability model for a classifier is a conditional model.</Utterance>
                <Utterance genid="45" ref="-1" time="12:31:38" date="09/12/2007" oldid="29" color="" topic="">p(C| F1,.....,Fn)</Utterance>
                <Utterance genid="46" ref="-1" time="12:32:27" date="09/12/2007" oldid="30" color="" topic="">a dependent class variable C and several variables: F1- Fn.</Utterance>
                <Utterance genid="47" ref="-1" time="12:33:01" date="09/12/2007" oldid="31" color="" topic="">The model can be reformulated to be more tractable in case of a large number of variables:</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="48">
                <Utterance genid="49" ref="-1" time="12:33:13" date="09/12/2007" oldid="32" color="" topic="">what are the variables used for?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="50">
                <Utterance genid="51" ref="-1" time="12:33:46" date="09/12/2007" oldid="33" color="" topic="">= p(C)*p(F1,....Fn|C)/p(F1,....,Fn)</Utterance>
                <Utterance genid="52" ref="-1" time="12:35:38" date="09/12/2007" oldid="34" color="" topic="">Variables are used to evaluate the occurence of C in a certain environment</Utterance>
                <Utterance genid="53" ref="-1" time="12:35:59" date="09/12/2007" oldid="35" color="" topic="">I is something like:</Utterance>
                <Utterance genid="54" ref="-1" time="12:36:19" date="09/12/2007" oldid="36" color="" topic="">Posterior = Prior*Likelihood/Evidence</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="55">
                <Utterance genid="56" ref="-1" time="12:37:47" date="09/12/2007" oldid="37" color="" topic="">ok, i got it</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="57">
                <Utterance genid="58" ref="-1" time="12:38:02" date="09/12/2007" oldid="38" color="" topic="">the future state is determined by the last state combined with the favorable factors and the sure events</Utterance>
                <Utterance genid="59" ref="-1" time="12:38:29" date="09/12/2007" oldid="39" color="" topic="">What are the caracteristics of the other models?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="60">
                <Utterance genid="61" ref="-1" time="12:39:38" date="09/12/2007" oldid="40" color="" topic="">anyone?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="62">
                <Utterance genid="63" ref="-1" time="12:40:15" date="09/12/2007" oldid="41" color="" topic="">:) What is interesting about Maximum Entropy?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="64">
                <Utterance genid="65" ref="-1" time="12:40:33" date="09/12/2007" oldid="42" color="" topic="">so</Utterance>
                <Utterance genid="66" ref="-1" time="12:42:39" date="09/12/2007" oldid="43" color="" topic="">The Maximum Entropy is a method for analyzing the available information in order to determine a unique epistemic probability distribution</Utterance>
                <Utterance genid="67" ref="-1" time="12:43:27" date="09/12/2007" oldid="44" color="" topic="">epistemic probability represents the probability of a statement being "true" given the total evidence available to you</Utterance>
                <Utterance genid="68" ref="-1" time="12:44:35" date="09/12/2007" oldid="45" color="" topic="">Maximum Entropy states that the least biased distribution that encodes certain given information is the one that maximizes the information entropy</Utterance>
                <Utterance genid="69" ref="-1" time="12:45:56" date="09/12/2007" oldid="46" color="" topic="">Information Entropy , or Shannon entropy is a therm used in information theory and represents a measure of the uncertainty associated with a random variable</Utterance>
                <Utterance genid="70" ref="-1" time="12:46:29" date="09/12/2007" oldid="47" color="" topic="">ME principle is only useful when applied to "testable information"</Utterance>
                <Utterance genid="71" ref="-1" time="12:48:13" date="09/12/2007" oldid="48" color="" topic="">And I mean by "testable information" the amount of info which can be determined wheter a given distribution is consistent with it</Utterance>
                <Utterance genid="72" ref="-1" time="12:48:32" date="09/12/2007" oldid="49" color="" topic="">An example for this would be:</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="73">
                <Utterance genid="74" ref="-1" time="12:48:44" date="09/12/2007" oldid="50" color="" topic="">So you need large amounts of test data to determine corect asumptions?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="75">
                <Utterance genid="76" ref="-1" time="12:48:52" date="09/12/2007" oldid="51" color="" topic="">"the expectation of variable x is 1.46"</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="77">
                <Utterance genid="78" ref="69" time="12:50:06" date="09/12/2007" oldid="52" color="" topic="">u mean having some data, u can predict with some probability that an event is going to happen?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="79">
                <Utterance genid="80" ref="-1" time="12:50:31" date="09/12/2007" oldid="53" color="" topic="">that's right Razvan</Utterance>
                <Utterance genid="81" ref="-1" time="12:51:37" date="09/12/2007" oldid="54" color="" topic="">yes Eliana, that's right</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="82">
                <Utterance genid="83" ref="-1" time="12:51:38" date="09/12/2007" oldid="55" color="" topic="">that is also true the NBM</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="84">
                <Utterance genid="85" ref="-1" time="12:52:05" date="09/12/2007" oldid="56" color="" topic="">I think all models are based on probabilities</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="86">
                <Utterance genid="87" ref="-1" time="12:52:44" date="09/12/2007" oldid="57" color="" topic="">it relies on the principle that when nothing is known, the probability distribution should be uniform, so having maximum entropy</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="88">
                <Utterance genid="89" ref="85" time="12:54:07" date="09/12/2007" oldid="58" color="" topic="">well, not quite</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="90">
                <Utterance genid="91" ref="89" time="12:54:35" date="09/12/2007" oldid="59" color="" topic="">Can you give more details?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="92">
                <Utterance genid="93" ref="91" time="12:56:25" date="09/12/2007" oldid="60" color="" topic="">so</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="94">
                <Utterance genid="95" ref="-1" time="12:56:26" date="09/12/2007" oldid="61" color="" topic="">(#user0#) do you want to add something alese to ME?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="96">
                <Utterance genid="97" ref="-1" time="12:57:40" date="09/12/2007" oldid="62" color="" topic="">given testable information, the ME consists of seeking the probability distribution wich maximizes information entropy, subject to the constraints of the information</Utterance>
                <Utterance genid="98" ref="-1" time="12:58:41" date="09/12/2007" oldid="63" color="" topic="">and we solve the problem of constrained optimization by using the method of Lagrange multipliers</Utterance>
                <Utterance genid="99" ref="-1" time="12:58:53" date="09/12/2007" oldid="64" color="" topic="">that's all for the moment</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="100">
                <Utterance genid="101" ref="-1" time="12:59:16" date="09/12/2007" oldid="65" color="" topic="">Ok, let's hear someting about SVN and HMM</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="102">
                <Utterance genid="103" ref="-1" time="12:59:17" date="09/12/2007" oldid="66" color="" topic="">let's have some descriptions for the 2 other classification methods</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="104">
                <Utterance genid="105" ref="-1" time="12:59:26" date="09/12/2007" oldid="67" color="" topic="">OK.Than I will start to talk about HMM</Utterance>
                <Utterance genid="106" ref="-1" time="12:59:36" date="09/12/2007" oldid="68" color="" topic="">*THEN</Utterance>
                <Utterance genid="107" ref="-1" time="01:02:49" date="09/12/2007" oldid="69" color="" topic="">The HMM is a statistical model. The modeled system it's assumed to be a Markov process. We have to types of parameters: hidden and observable. The chalenge is to determine the hidden parameters.</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="108">
                <Utterance genid="109" ref="-1" time="01:04:32" date="09/12/2007" oldid="70" color="" topic="">what is a Markov process?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="110">
                <Utterance genid="111" ref="-1" time="01:06:28" date="09/12/2007" oldid="71" color="" topic="">The HMM is used in many fields: speech recognition, optical character recognition, machine traslation, partial discharge,bioinformatics and so on..</Utterance>
                <Utterance genid="112" ref="109" time="01:10:34" date="09/12/2007" oldid="72" color="" topic="">It is a stochastic process that have the folowing property: the value of a hidden variable X(t) (at time t) only depends on the value of the hidden variable X(t-1)(at time t - 1) and the value of observable parameter Y(t) (at time t) only depends on the value of the hidden variable X(t) (at time t).</Utterance>
                <Utterance genid="113" ref="-1" time="01:10:53" date="09/12/2007" oldid="73" color="" topic="">this property is known as Markov property</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="114">
                <Utterance genid="115" ref="-1" time="01:11:31" date="09/12/2007" oldid="74" color="" topic="">ok, thank you</Utterance>
                <Utterance genid="116" ref="-1" time="01:11:39" date="09/12/2007" oldid="75" color="" topic="">now about the svm</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="117">
                <Utterance genid="118" ref="-1" time="01:12:19" date="09/12/2007" oldid="76" color="" topic="">We're listening... (sort of)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="119">
                <Utterance genid="120" ref="-1" time="01:13:19" date="09/12/2007" oldid="77" color="" topic="">svm states for support vector machines</Utterance>
                <Utterance genid="121" ref="-1" time="01:14:31" date="09/12/2007" oldid="78" color="" topic="">it is a supervised learning technique, meaning that we have to train the machine first</Utterance>
                <Utterance genid="122" ref="-1" time="01:15:42" date="09/12/2007" oldid="79" color="" topic="">(#user2#), what is this?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="123">
                <Utterance genid="124" ref="-1" time="01:16:18" date="09/12/2007" oldid="80" color="" topic="">:) it is for HMM</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="125">
                <Utterance genid="126" ref="122" time="01:16:31" date="09/12/2007" oldid="81" color="" topic="">Probabilistic parameters of a hidden Markov model</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="127">
                <Utterance genid="128" ref="-1" time="01:16:38" date="09/12/2007" oldid="82" color="" topic="">after you finish i want to have an example</Utterance>
                <Utterance genid="129" ref="-1" time="01:17:13" date="09/12/2007" oldid="83" color="" topic="">because i think the two models presented by (#user1#) and (#user0#) could be integrated in HMM</Utterance>
                <Utterance genid="130" ref="-1" time="01:17:38" date="09/12/2007" oldid="84" color="" topic="">sorry, go on..</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="131">
                <Utterance genid="132" ref="129" time="01:17:40" date="09/12/2007" oldid="85" color="" topic="">i agree with that</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="133">
                <Utterance genid="134" ref="128" time="01:17:47" date="09/12/2007" oldid="86" color="" topic="">you can have it now, i will need the whiteboard, too</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="135">
                <Utterance genid="136" ref="-1" time="01:17:57" date="09/12/2007" oldid="87" color="" topic="">i also read some articles related to this</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="137">
                <Utterance genid="138" ref="-1" time="01:18:10" date="09/12/2007" oldid="88" color="" topic="">ok</Utterance>
                <Utterance genid="139" ref="-1" time="01:18:40" date="09/12/2007" oldid="89" color="" topic="">i'll put it after you finish with SVM</Utterance>
                <Utterance genid="140" ref="-1" time="01:18:52" date="09/12/2007" oldid="90" color="" topic="">go on with SVN :)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="141">
                <Utterance genid="142" ref="-1" time="01:18:55" date="09/12/2007" oldid="91" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="143">
                <Utterance genid="144" ref="-1" time="01:19:20" date="09/12/2007" oldid="92" color="" topic="">and then will have the discussion about integration of the 4 models</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="145">
                <Utterance genid="146" ref="-1" time="01:20:10" date="09/12/2007" oldid="93" color="" topic="">back to svm. They are a method for creating functions from a set of labeled training data. The function can be a classification function (the output is binary: is the input in a category) or the function can be a general regression function.</Utterance>
                <Utterance genid="147" ref="-1" time="01:20:58" date="09/12/2007" oldid="94" color="" topic="">i don't know if u remember neural networks</Utterance>
                <Utterance genid="148" ref="-1" time="01:21:13" date="09/12/2007" oldid="95" color="" topic="">SVM are in some ways similar to them</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="149">
                <Utterance genid="150" ref="-1" time="01:21:22" date="09/12/2007" oldid="96" color="" topic="">Yes, used them for text recognition</Utterance>
                <Utterance genid="151" ref="-1" time="01:21:57" date="09/12/2007" oldid="97" color="" topic="">so it is possible to implement SVM as a neural network?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="152">
                <Utterance genid="153" ref="-1" time="01:22:03" date="09/12/2007" oldid="98" color="" topic="">svm are also used in pattern matching, hand written text...</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="154">
                <Utterance genid="155" ref="153" time="01:22:46" date="09/12/2007" oldid="99" color="" topic="">so we might have OCR implementations using SVM method</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="156">
                <Utterance genid="157" ref="155" time="01:23:40" date="09/12/2007" oldid="100" color="" topic=""/>
                <Utterance genid="158" ref="-1" time="01:23:44" date="09/12/2007" oldid="101" color="" topic="">I think so, it's that right Eliana?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="159">
                <Utterance genid="160" ref="-1" time="01:24:50" date="09/12/2007" oldid="102" color="" topic="">of course. many forms of svm are equivalent with multy layer perceptron neural networks</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="161">
                <Utterance genid="162" ref="-1" time="01:25:29" date="09/12/2007" oldid="103" color="" topic="">aha, i see</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="163">
                <Utterance genid="164" ref="-1" time="01:25:32" date="09/12/2007" oldid="104" color="" topic="">a SVM performs classification by constructing an N-dimensional hyperplane that optimally separates the data into two categories.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="165">
                <Utterance genid="166" ref="-1" time="01:25:34" date="09/12/2007" oldid="105" color="" topic="">So above are several uses for SVM</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="167">
                <Utterance genid="168" ref="166" time="01:26:48" date="09/12/2007" oldid="106" color="" topic="">what do you mean by that?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="169">
                <Utterance genid="170" ref="-1" time="01:27:46" date="09/12/2007" oldid="107" color="" topic="">I wanted to say that some applications for SVM were specified</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="171">
                <Utterance genid="172" ref="-1" time="01:28:02" date="09/12/2007" oldid="108" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="173">
                <Utterance genid="174" ref="-1" time="01:28:06" date="09/12/2007" oldid="109" color="" topic="">NB can be used in anti-spam filters, for example</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="175">
                <Utterance genid="176" ref="164" time="01:28:31" date="09/12/2007" oldid="110" color="" topic="">Often we are interested in classifying data as a part of a machine-learning process. Each data point will be represented by a p-dimensional vector (a list of p numbers). Each of these data points belongs to only one of two classes. We are interested in whether we can separate them with a "p minus 1" dimensional hyperplane. let's have a 2-dimensional example</Utterance>
                <Utterance genid="177" ref="-1" time="01:28:47" date="09/12/2007" oldid="111" color="" topic="">i'll try to sketch it on the board</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="178">
                <Utterance genid="179" ref="177" time="01:29:03" date="09/12/2007" oldid="112" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="180">
                <Utterance genid="181" ref="174" time="01:29:43" date="09/12/2007" oldid="113" color="" topic="">By counting the words in messages and taking into account the probabillity of each one to appear in a spam message</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="182">
                <Utterance genid="183" ref="181" time="01:30:17" date="09/12/2007" oldid="114" color="" topic="">Aha, I was wondering how it's that possible</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="184">
                <Utterance genid="185" ref="181" time="01:30:39" date="09/12/2007" oldid="115" color="" topic="">anyway, is a good application</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="186">
                <Utterance genid="187" ref="-1" time="01:30:51" date="09/12/2007" oldid="116" color="" topic="">it is possible the compute the chances of it to be SPAM or NOT</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="188">
                <Utterance genid="189" ref="-1" time="01:31:00" date="09/12/2007" oldid="117" color="" topic="">as we are all facing spam problems</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="190">
                <Utterance genid="191" ref="-1" time="01:31:21" date="09/12/2007" oldid="118" color="" topic="">:) that is true</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="192">
                <Utterance genid="193" ref="-1" time="01:31:34" date="09/12/2007" oldid="119" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="194">
                <Utterance genid="195" ref="187" time="01:32:12" date="09/12/2007" oldid="120" color="" topic="">In the case of two mutual exclusive alternatives, the conversion of the probability is a sigmoid curve ( http://en.wikipedia.org/wiki/Sigmoid_curve )</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="196">
                <Utterance genid="197" ref="-1" time="01:32:25" date="09/12/2007" oldid="121" color="" topic="">i just wanted to add that recently, it has been shown that Bayes' Rule and the Principle of Maximum Entropy are completely compatible and can be seen as special cases of the Method of Maximum (relative) Entropy (ME)</Utterance>
                <Utterance genid="198" ref="-1" time="01:35:00" date="09/12/2007" oldid="122" color="" topic="">and, as (#user2#) said before HMM can the algorithms of NB and ME</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="199">
                <Utterance genid="200" ref="-1" time="01:35:17" date="09/12/2007" oldid="123" color="" topic="">yes.I wait Eliana to finish</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="201">
                <Utterance genid="202" ref="-1" time="01:35:26" date="09/12/2007" oldid="124" color="" topic="">ok</Utterance>
                <Utterance genid="203" ref="-1" time="01:35:38" date="09/12/2007" oldid="125" color="" topic="">then i will tell you more</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="204">
                <Utterance genid="205" ref="-1" time="01:35:42" date="09/12/2007" oldid="126" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="206">
                <Utterance genid="207" ref="-1" time="01:36:13" date="09/12/2007" oldid="127" color="" topic="">i draw a training set</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="208">
                <Utterance genid="209" ref="-1" time="01:36:26" date="09/12/2007" oldid="128" color="" topic="">i can't see anything</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="210">
                <Utterance genid="211" ref="-1" time="01:36:32" date="09/12/2007" oldid="129" color="" topic="">we have two classes</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="212">
                <Utterance genid="213" ref="-1" time="01:36:33" date="09/12/2007" oldid="130" color="" topic="">:(</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="214">
                <Utterance genid="215" ref="-1" time="01:36:45" date="09/12/2007" oldid="131" color="" topic="">one contains x-objects</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="216">
                <Utterance genid="217" ref="207" time="01:36:49" date="09/12/2007" oldid="132" color="" topic="">i saw it but it appears and disappears</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="218">
                <Utterance genid="219" ref="207" time="01:37:00" date="09/12/2007" oldid="133" color="" topic="">i don't see anything</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="220">
                <Utterance genid="221" ref="-1" time="01:37:36" date="09/12/2007" oldid="134" color="" topic="">and the other one contains some closed curbes objects</Utterance>
                <Utterance genid="222" ref="-1" time="01:38:17" date="09/12/2007" oldid="135" color="" topic="">scroll down in your white boards</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="223">
                <Utterance genid="224" ref="-1" time="01:38:39" date="09/12/2007" oldid="136" color="" topic="">there is nothing</Utterance>
                <Utterance genid="225" ref="-1" time="01:39:19" date="09/12/2007" oldid="137" color="" topic="">(#user0#) can you see the drawing?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="226">
                <Utterance genid="227" ref="-1" time="01:40:16" date="09/12/2007" oldid="138" color="" topic="">no</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="228">
                <Utterance genid="229" ref="-1" time="01:41:15" date="09/12/2007" oldid="139" color="" topic="">let me try importing a jpg</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="230">
                <Utterance genid="231" ref="-1" time="01:41:24" date="09/12/2007" oldid="140" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="232">
                <Utterance genid="233" ref="-1" time="01:43:11" date="09/12/2007" oldid="141" color="" topic="">can u see the image?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="234">
                <Utterance genid="235" ref="-1" time="01:43:30" date="09/12/2007" oldid="142" color="" topic="">no</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="236">
                <Utterance genid="237" ref="-1" time="01:43:55" date="09/12/2007" oldid="143" color="" topic="">i only saw a green rectangle but it disappeared</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="238">
                <Utterance genid="239" ref="-1" time="01:44:25" date="09/12/2007" oldid="144" color="" topic="">same here</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="240">
                <Utterance genid="241" ref="233" time="01:44:29" date="09/12/2007" oldid="145" color="" topic="">now a blue one</Utterance>
                <Utterance genid="242" ref="-1" time="01:44:38" date="09/12/2007" oldid="146" color="" topic="">hmmm</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="243">
                <Utterance genid="244" ref="-1" time="01:44:56" date="09/12/2007" oldid="147" color="" topic="">i don't see anything</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="245">
                <Utterance genid="246" ref="235" time="01:45:00" date="09/12/2007" oldid="148" color="" topic="">seams that when i click on now i see</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="247">
                <Utterance genid="248" ref="-1" time="01:45:09" date="09/12/2007" oldid="149" color="" topic="">and i think that it is bcz you put a refernce</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="249">
                <Utterance genid="250" ref="246" time="01:45:23" date="09/12/2007" oldid="150" color="" topic="">now i see</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="251">
                <Utterance genid="252" ref="-1" time="01:45:27" date="09/12/2007" oldid="151" color="" topic="">me too</Utterance>
                <Utterance genid="253" ref="-1" time="01:46:06" date="09/12/2007" oldid="152" color="" topic="">Eliana could you explain the picture?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="254">
                <Utterance genid="255" ref="-1" time="01:48:08" date="09/12/2007" oldid="153" color="" topic="">i talked about p minus 1" dimensional hyperplane; in a 2 dimansional space, we can consider that a hyperplane is a line</Utterance>
                <Utterance genid="256" ref="-1" time="01:49:40" date="09/12/2007" oldid="154" color="" topic="">in the example in the left side of the image, we have a line separating the two set of objects</Utterance>
                <Utterance genid="257" ref="-1" time="01:50:27" date="09/12/2007" oldid="155" color="" topic="">te catch is that the separator is equally distanced from the two sets</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="258">
                <Utterance genid="259" ref="256" time="01:50:42" date="09/12/2007" oldid="156" color="" topic="">do the angle of the separation lines have any importance?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="260">
                <Utterance genid="261" ref="-1" time="01:50:52" date="09/12/2007" oldid="157" color="" topic="">[we have the dotted lines to prove that]</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="262">
                <Utterance genid="263" ref="-1" time="01:51:08" date="09/12/2007" oldid="158" color="" topic="">because the only difference between the two pistures seems to be the angle</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="264">
                <Utterance genid="265" ref="-1" time="01:51:11" date="09/12/2007" oldid="159" color="" topic="">wait and u shall find out</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="266">
                <Utterance genid="267" ref="-1" time="01:51:15" date="09/12/2007" oldid="160" color="" topic="">ok</Utterance>
                <Utterance genid="268" ref="-1" time="01:51:16" date="09/12/2007" oldid="161" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="269">
                <Utterance genid="270" ref="-1" time="01:53:05" date="09/12/2007" oldid="162" color="" topic="">the distance between the line in the middle and the dooted lines is the distance betweenn the separator and each of the two categories</Utterance>
                <Utterance genid="271" ref="-1" time="01:53:26" date="09/12/2007" oldid="163" color="" topic="">we want to find that separator that maximizes that distnace</Utterance>
                <Utterance genid="272" ref="-1" time="01:54:05" date="09/12/2007" oldid="164" color="" topic="">and we have it in the image in the right side</Utterance>
                <Utterance genid="273" ref="-1" time="01:54:19" date="09/12/2007" oldid="165" color="" topic="">A special property of SVMs is that they simultaneously minimize the empirical classification error and maximize the geometric margin; hence they are also known as maximum margin classifiers.</Utterance>
                <Utterance genid="274" ref="-1" time="01:54:41" date="09/12/2007" oldid="166" color="" topic="">why do we want to maximize that distance?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="275">
                <Utterance genid="276" ref="271" time="01:54:50" date="09/12/2007" oldid="167" color="" topic="">what is the importance of having that maximum distance?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="277">
                <Utterance genid="278" ref="-1" time="01:55:03" date="09/12/2007" oldid="168" color="" topic="">remember, this is only the training set</Utterance>
                <Utterance genid="279" ref="-1" time="01:56:20" date="09/12/2007" oldid="169" color="" topic="">after the learning phaze is over, my classifier will be able to correctly classify objects/items that are similar, but not identical to the ones in the train set</Utterance>
                <Utterance genid="280" ref="-1" time="01:57:13" date="09/12/2007" oldid="170" color="" topic="">those objects coud be between th eseparating line and one of the dotted line</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="281">
                <Utterance genid="282" ref="-1" time="01:58:36" date="09/12/2007" oldid="171" color="" topic="">so you mean similar objects are located in that area, right?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="283">
                <Utterance genid="284" ref="-1" time="01:58:56" date="09/12/2007" oldid="172" color="" topic="">right</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="285">
                <Utterance genid="286" ref="274" time="01:59:43" date="09/12/2007" oldid="173" color="" topic="">how about this</Utterance>
                <Utterance genid="287" ref="-1" time="01:59:46" date="09/12/2007" oldid="174" color="" topic="">?</Utterance>
                <Utterance genid="288" ref="-1" time="02:00:20" date="09/12/2007" oldid="175" color="" topic="">the larger the distance, the more similar objects exist?</Utterance>
                <Utterance genid="289" ref="-1" time="02:00:32" date="09/12/2007" oldid="176" color="" topic="">is this the explanation?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="290">
                <Utterance genid="291" ref="288" time="02:02:31" date="09/12/2007" oldid="177" color="" topic="">not quite; the larger the distance, more similar objects are correctly classified</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="292">
                <Utterance genid="293" ref="-1" time="02:03:07" date="09/12/2007" oldid="178" color="" topic="">i understand</Utterance>
                <Utterance genid="294" ref="-1" time="02:03:11" date="09/12/2007" oldid="179" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="295">
                <Utterance genid="296" ref="-1" time="02:04:04" date="09/12/2007" oldid="180" color="" topic="">i don't know if you can see a blue object that i draw in se image in the right side</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="297">
                <Utterance genid="298" ref="-1" time="02:04:20" date="09/12/2007" oldid="181" color="" topic="">i can see it</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="299">
                <Utterance genid="300" ref="-1" time="02:04:25" date="09/12/2007" oldid="182" color="" topic="">i can see it</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="301">
                <Utterance genid="302" ref="-1" time="02:04:37" date="09/12/2007" oldid="183" color="" topic="">i suppose (#user0#) can see it too</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="303">
                <Utterance genid="304" ref="-1" time="02:04:41" date="09/12/2007" oldid="184" color="" topic="">in this image, it will be correctly classified as blue</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="305">
                <Utterance genid="306" ref="-1" time="02:04:45" date="09/12/2007" oldid="185" color="" topic="">i also moved it :)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="307">
                <Utterance genid="308" ref="-1" time="02:04:45" date="09/12/2007" oldid="186" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="309">
                <Utterance genid="310" ref="-1" time="02:05:23" date="09/12/2007" oldid="187" color="" topic="">but in the left image, i think is right on the border</Utterance>
                <Utterance genid="311" ref="-1" time="02:05:35" date="09/12/2007" oldid="188" color="" topic="">an it will be classified green or blue</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="312">
                <Utterance genid="313" ref="311" time="02:06:08" date="09/12/2007" oldid="189" color="" topic="">by the way, what the meaning of the colors?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="314">
                <Utterance genid="315" ref="-1" time="02:06:32" date="09/12/2007" oldid="190" color="" topic="">not meaning, I think; just two different classes</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="316">
                <Utterance genid="317" ref="-1" time="02:06:53" date="09/12/2007" oldid="191" color="" topic="">i used colours instead of functions</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="318">
                <Utterance genid="319" ref="315" time="02:06:57" date="09/12/2007" oldid="192" color="" topic="">=no</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="320">
                <Utterance genid="321" ref="-1" time="02:08:05" date="09/12/2007" oldid="193" color="" topic="">anything else?</Utterance>
                <Utterance genid="322" ref="-1" time="02:08:11" date="09/12/2007" oldid="194" color="" topic="">about SVM</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="323">
                <Utterance genid="324" ref="-1" time="02:08:13" date="09/12/2007" oldid="195" color="" topic="">given a training set(a training set includes some input values and correct outputs) we have to define a function that best separates the two classes</Utterance>
                <Utterance genid="325" ref="-1" time="02:12:27" date="09/12/2007" oldid="196" color="" topic="">there's more. I only talked about linear classifiers, there are also non-linear classifiers, using a non-linear function( kernel function), but i will not get into details about non linear classifiers</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="326">
                <Utterance genid="327" ref="-1" time="02:12:43" date="09/12/2007" oldid="197" color="" topic="">Ok</Utterance>
                <Utterance genid="328" ref="-1" time="02:13:01" date="09/12/2007" oldid="198" color="" topic="">I thnik I understand what is it about</Utterance>
                <Utterance genid="329" ref="-1" time="02:13:55" date="09/12/2007" oldid="199" color="" topic="">Is it possible to integrate the 4 models with each other?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="330">
                <Utterance genid="331" ref="-1" time="02:14:30" date="09/12/2007" oldid="200" color="" topic="">Eliana have you finished talking about SVM?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="332">
                <Utterance genid="333" ref="-1" time="02:15:21" date="09/12/2007" oldid="201" color="" topic="">yes, i have finished</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="334">
                <Utterance genid="335" ref="-1" time="02:16:08" date="09/12/2007" oldid="202" color="" topic="">if it is ok for you i will have an example for HMM</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="336">
                <Utterance genid="337" ref="-1" time="02:16:21" date="09/12/2007" oldid="203" color="" topic="">sure</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="338">
                <Utterance genid="339" ref="-1" time="02:16:22" date="09/12/2007" oldid="204" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="340">
                <Utterance genid="341" ref="-1" time="02:16:34" date="09/12/2007" oldid="205" color="" topic="">I sais that in HMM we have two types of parameters</Utterance>
                <Utterance genid="342" ref="-1" time="02:16:49" date="09/12/2007" oldid="206" color="" topic="">one hidden (unknown) and one observable</Utterance>
                <Utterance genid="343" ref="-1" time="02:18:42" date="09/12/2007" oldid="207" color="" topic="">In this picture the hidden parametrs are x1,x2,x3</Utterance>
                <Utterance genid="344" ref="-1" time="02:18:58" date="09/12/2007" oldid="208" color="" topic="">and the observable parameters are y1,..,y4</Utterance>
                <Utterance genid="345" ref="-1" time="02:20:30" date="09/12/2007" oldid="209" color="" topic="">let's have an example: a person is talking daily at telephone with a friend from another continent. They always talk about the activities that they done.</Utterance>
                <Utterance genid="346" ref="-1" time="02:22:37" date="09/12/2007" oldid="210" color="" topic="">On the first day one of them walked in the park, the next day shopped and the third day he cleanned the house. The other person want's to guess how the weather was (sunny or rainy)</Utterance>
                <Utterance genid="347" ref="-1" time="02:22:53" date="09/12/2007" oldid="211" color="" topic="">sunny and rainy are the hidden parametrs</Utterance>
                <Utterance genid="348" ref="-1" time="02:23:06" date="09/12/2007" oldid="212" color="" topic="">and the walk,clean,shop the observable parametrs</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="349">
                <Utterance genid="350" ref="-1" time="02:26:41" date="09/12/2007" oldid="213" color="" topic="">so it must be sunny if we hear about a walk in the park</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="351">
                <Utterance genid="352" ref="-1" time="02:27:30" date="09/12/2007" oldid="214" color="" topic="">The problems that HMM can resolve are: given the output sequence( observable parametrs) find the most likely sequence of hidden state; the probability of the hidden state values and the probability of a particular output sequence given the output sequence;discover the parameters of the HMM given a datasetof sequence.</Utterance>
                <Utterance genid="353" ref="-1" time="02:29:11" date="09/12/2007" oldid="215" color="" topic="">in our example we have the output and we want the find want how was the weather when he walked,shopped and cleaned</Utterance>
                <Utterance genid="354" ref="350" time="02:29:36" date="09/12/2007" oldid="216" color="" topic="">there is a probabilty that it rainy</Utterance>
                <Utterance genid="355" ref="-1" time="02:30:05" date="09/12/2007" oldid="217" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="356">
                <Utterance genid="357" ref="354" time="02:30:11" date="09/12/2007" oldid="218" color="" topic="">that's right</Utterance>
                <Utterance genid="358" ref="-1" time="02:30:18" date="09/12/2007" oldid="219" color="" topic="">we don't know for sure</Utterance>
                <Utterance genid="359" ref="-1" time="02:30:37" date="09/12/2007" oldid="220" color="" topic="">as we probably don't know that it was rainy when he cleaned the house</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="360">
                <Utterance genid="361" ref="359" time="02:31:03" date="09/12/2007" oldid="221" color="" topic="">that true too</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="362">
                <Utterance genid="363" ref="-1" time="02:31:51" date="09/12/2007" oldid="222" color="" topic="">there could have been given only exemples of house related activities but the weather beeing sunny</Utterance>
                <Utterance genid="364" ref="-1" time="02:31:51" date="09/12/2007" oldid="223" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="365">
                <Utterance genid="366" ref="-1" time="02:31:54" date="09/12/2007" oldid="224" color="" topic="">i rephrase, it is a big probability that it was sunny if th eperson walked in the park, unless we hear "we had to finish our walk due to the weather"</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="367">
                <Utterance genid="368" ref="366" time="02:32:16" date="09/12/2007" oldid="225" color="" topic="">right</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="369">
                <Utterance genid="370" ref="-1" time="02:34:51" date="09/12/2007" oldid="226" color="" topic="">To resolve the problem we must know the probabilities for transitioning between states</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="371">
                <Utterance genid="372" ref="370" time="02:36:30" date="09/12/2007" oldid="227" color="" topic="">and we use the probabilities between x and y for that ?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="373">
                <Utterance genid="374" ref="-1" time="02:38:24" date="09/12/2007" oldid="228" color="" topic="">The algorithm used in this problem (Viterbi) must know the start_probability(the probability to be sunny/rainy when they first talk at the phone), transition_probability(the probability to be rainy/sunny if yesterday was sunny/rainy) , emission_probability( if it is rainy/sunny what is the posibility for walk,shop,clean)</Utterance>
                <Utterance genid="375" ref="372" time="02:38:46" date="09/12/2007" oldid="229" color="" topic="">yes</Utterance>
                <Utterance genid="376" ref="-1" time="02:40:05" date="09/12/2007" oldid="230" color="" topic="">this probabilities can be found out using BN or ME</Utterance>
                <Utterance genid="377" ref="-1" time="02:40:47" date="09/12/2007" oldid="231" color="" topic="">(#user0#),(#user1#) what do yuo think about this?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="378">
                <Utterance genid="379" ref="-1" time="02:41:01" date="09/12/2007" oldid="232" color="" topic="">interesting</Utterance>
                <Utterance genid="380" ref="-1" time="02:41:25" date="09/12/2007" oldid="233" color="" topic="">so this is a good example of combining our methods</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="381">
                <Utterance genid="382" ref="-1" time="02:41:26" date="09/12/2007" oldid="234" color="" topic="">As I said above, NB can be used with HMM</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="383">
                <Utterance genid="384" ref="-1" time="02:42:20" date="09/12/2007" oldid="235" color="" topic="">yeah, HMM is improving the performance of ME too</Utterance>
                <Utterance genid="385" ref="-1" time="02:43:02" date="09/12/2007" oldid="236" color="" topic="">as i told you before, I read an article about "Randomized clinical trials"</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="386">
                <Utterance genid="387" ref="-1" time="02:43:09" date="09/12/2007" oldid="237" color="" topic="">HMM improvers the performance of ME or it uses ME?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="388">
                <Utterance genid="389" ref="-1" time="02:43:10" date="09/12/2007" oldid="238" color="" topic="">this is used in medical domain</Utterance>
                <Utterance genid="390" ref="387" time="02:43:25" date="09/12/2007" oldid="239" color="" topic="">both of them</Utterance>
                <Utterance genid="391" ref="-1" time="02:43:44" date="09/12/2007" oldid="240" color="" topic="">this article, which can be found at: http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1839538</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="392">
                <Utterance genid="393" ref="390" time="02:43:49" date="09/12/2007" oldid="241" color="" topic="">How can HMM improve the performance of ME?</Utterance>
                <Utterance genid="394" ref="-1" time="02:43:55" date="09/12/2007" oldid="242" color="" topic="">can you give me an example?</Utterance>
                <Utterance genid="395" ref="-1" time="02:44:18" date="09/12/2007" oldid="243" color="" topic="">and what about SVM?</Utterance>
                <Utterance genid="396" ref="-1" time="02:44:26" date="09/12/2007" oldid="244" color="" topic="">Eliana?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="397">
                <Utterance genid="398" ref="-1" time="02:45:09" date="09/12/2007" oldid="245" color="" topic="">if you go on the "Results" section of the article, it's said:</Utterance>
                <Utterance genid="399" ref="-1" time="02:45:11" date="09/12/2007" oldid="246" color="" topic="">"The results show F1 improvements across five type of abstract sections with HMM augmentation. Table 2 shows percentage improvements of using HMM on three classification algorithms NB, ME and DT respectively, averaging across three modification options (basic, with boosting, with bagging) and five section types."</Utterance>
                <Utterance genid="400" ref="-1" time="02:45:55" date="09/12/2007" oldid="247" color="" topic="">F1 refers to performance measuring</Utterance>
                <Utterance genid="401" ref="-1" time="02:46:21" date="09/12/2007" oldid="248" color="" topic="">and is a composite measure of classification precision and recall</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="402">
                <Utterance genid="403" ref="-1" time="02:46:45" date="09/12/2007" oldid="249" color="" topic="">i understand</Utterance>
                <Utterance genid="404" ref="-1" time="02:46:51" date="09/12/2007" oldid="250" color="" topic="">and what about SVM?</Utterance>
                <Utterance genid="405" ref="-1" time="02:47:10" date="09/12/2007" oldid="251" color="" topic="">we should think how to integrate SVM with the other models</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="406">
                <Utterance genid="407" ref="-1" time="02:48:37" date="09/12/2007" oldid="252" color="" topic="">please see the picture</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="408">
                <Utterance genid="409" ref="-1" time="02:48:55" date="09/12/2007" oldid="253" color="" topic="">i'm looking right now</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="410">
                <Utterance genid="411" ref="404" time="02:49:22" date="09/12/2007" oldid="254" color="" topic="">i think svm is powerfull enough on its own, but i also think it can be integrated with naive bayes (considering that the two classes are independent)</Utterance>
                <Utterance genid="412" ref="-1" time="02:50:32" date="09/12/2007" oldid="255" color="" topic="">i really don't see any picture</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="413">
                <Utterance genid="414" ref="407" time="02:51:24" date="09/12/2007" oldid="256" color="" topic="">how can i refer a picture from the text area?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="415">
                <Utterance genid="416" ref="-1" time="02:52:12" date="09/12/2007" oldid="257" color="" topic="">[go to the hand tool and select the picture]</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="417">
                <Utterance genid="418" ref="-1" time="02:52:18" date="09/12/2007" oldid="258" color="" topic="">you select the hand</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="419">
                <Utterance genid="420" ref="-1" time="02:52:26" date="09/12/2007" oldid="259" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="421">
                <Utterance genid="422" ref="-1" time="02:52:37" date="09/12/2007" oldid="260" color="" topic="">:)</Utterance>
                <Utterance genid="423" ref="-1" time="02:52:56" date="09/12/2007" oldid="261" color="" topic="">can you see it now?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="424">
                <Utterance genid="425" ref="-1" time="02:53:00" date="09/12/2007" oldid="262" color="" topic="">i see it now</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="426">
                <Utterance genid="427" ref="-1" time="02:53:06" date="09/12/2007" oldid="263" color="" topic="">good</Utterance>
                <Utterance genid="428" ref="-1" time="02:53:39" date="09/12/2007" oldid="264" color="" topic="">so, is there anyone who wants to add something?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="429">
                <Utterance genid="430" ref="-1" time="02:55:37" date="09/12/2007" oldid="265" color="" topic="">I'm done</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="431">
                <Utterance genid="432" ref="-1" time="02:56:08" date="09/12/2007" oldid="266" color="" topic="">me too</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="433">
                <Utterance genid="434" ref="-1" time="02:56:34" date="09/12/2007" oldid="267" color="" topic="">i have something to add</Utterance>
                <Utterance genid="435" ref="-1" time="02:59:00" date="09/12/2007" oldid="268" color="" topic="">as i was searching the Internet i found a project : Dialog act tagging that integrates HMM and SVN</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="436">
                <Utterance genid="437" ref="-1" time="03:01:46" date="09/12/2007" oldid="269" color="" topic="">would you share it with us?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="438">
                <Utterance genid="439" ref="-1" time="03:01:56" date="09/12/2007" oldid="270" color="" topic="">In this paper,they showed that linear support vector machines can easily integrate text and acoustic features,and that class posterior probabilities estimated from their outputs can be input to HMMs to produce a simple,fast dialog act classification algorithm.</Utterance>
                <Utterance genid="440" ref="-1" time="03:02:33" date="09/12/2007" oldid="271" color="" topic="">people.cs.uchicago.edu/~dinoj/svmhmmda.pdf</Utterance>
                <Utterance genid="441" ref="-1" time="03:03:21" date="09/12/2007" oldid="272" color="" topic="">so ...we can integrate all 4 methods</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="442">
                <Utterance genid="443" ref="-1" time="03:03:29" date="09/12/2007" oldid="273" color="" topic="">that right</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="444">
                <Utterance genid="445" ref="-1" time="03:03:40" date="09/12/2007" oldid="274" color="" topic="">to solve a complex problem</Utterance>
                <Utterance genid="446" ref="-1" time="03:04:05" date="09/12/2007" oldid="275" color="" topic="">ok...I have nothing to add anymore</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="447">
                <Utterance genid="448" ref="-1" time="03:04:32" date="09/12/2007" oldid="276" color="" topic="">I think this concludes our discussion today</Utterance>
                <Utterance genid="449" ref="-1" time="03:04:41" date="09/12/2007" oldid="277" color="" topic="">anyone has something else to add?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="450">
                <Utterance genid="451" ref="-1" time="03:05:06" date="09/12/2007" oldid="278" color="" topic="">anyway, as the all four methods help completing certin parts of the other methods their are certainly possible to integrate with each other</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="452">
                <Utterance genid="453" ref="-1" time="03:05:45" date="09/12/2007" oldid="279" color="" topic="">each has it's strong parts and an integration should take advantage of each one</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="454">
                <Utterance genid="455" ref="453" time="03:05:57" date="09/12/2007" oldid="280" color="" topic="">right</Utterance>
                <Utterance genid="456" ref="-1" time="03:06:17" date="09/12/2007" oldid="281" color="" topic="">so that would be all...</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="457">
                <Utterance genid="458" ref="-1" time="03:06:29" date="09/12/2007" oldid="282" color="" topic="">ok</Utterance>
                <Utterance genid="459" ref="-1" time="03:06:37" date="09/12/2007" oldid="283" color="" topic="">have a plasant afternoon</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="460">
                <Utterance genid="461" ref="-1" time="03:06:43" date="09/12/2007" oldid="284" color="" topic="">you too</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="462">
                <Utterance genid="463" ref="459" time="03:06:47" date="09/12/2007" oldid="285" color="" topic="">pleasant</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="464">
                <Utterance genid="465" ref="-1" time="03:06:48" date="09/12/2007" oldid="286" color="" topic="">tks, you too</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="466">
                <Utterance genid="467" ref="-1" time="03:06:52" date="09/12/2007" oldid="287" color="" topic="">bye</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="469">
                <Utterance genid="470" ref="-1" time="03:06:59" date="09/12/2007" oldid="289" color="" topic="">bye bye</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="471">
                <Utterance genid="472" ref="-1" time="03:07:06" date="09/12/2007" oldid="290" color="" topic="">bye</Utterance>
            </Turn>
        </Body>
    </Dialog>
