<?xml version="1.0" encoding="UTF-8"?>
<Dialog file="">
        <Body>
            <Topics/>
            <Turn nickname="(#user2#)" genid="4">
                <Utterance genid="5" ref="-1" time="05:26:31" date="09/12/2007" oldid="4" color="" topic="">hello there</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="6">
                <Utterance genid="7" ref="-1" time="05:26:48" date="09/12/2007" oldid="5" color="" topic="">Hello!</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="8">
                <Utterance genid="9" ref="-1" time="05:26:57" date="09/12/2007" oldid="6" color="" topic="">hello!</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="10">
                <Utterance genid="11" ref="-1" time="05:27:26" date="09/12/2007" oldid="7" color="" topic="">we have to wait a little more for adi</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="12">
                <Utterance genid="13" ref="-1" time="05:27:38" date="09/12/2007" oldid="8" color="" topic="">I agree</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="14">
                <Utterance genid="15" ref="11" time="05:28:19" date="09/12/2007" oldid="9" color="" topic="">the download is taking a little more for him</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="16">
                <Utterance genid="17" ref="15" time="05:28:52" date="09/12/2007" oldid="10" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="18">
                <Utterance genid="19" ref="15" time="05:33:54" date="09/12/2007" oldid="11" color="" topic="">Are you sure he is waiting for the download?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="20">
                <Utterance genid="21" ref="19" time="05:34:26" date="09/12/2007" oldid="12" color="" topic="">that is what he has told me on ym</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="23">
                <Utterance genid="24" ref="-1" time="05:38:07" date="09/12/2007" oldid="14" color="" topic="">Welcome, (#user1#)!</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="25">
                <Utterance genid="26" ref="-1" time="05:38:28" date="09/12/2007" oldid="15" color="" topic="">Hello</Utterance>
                <Utterance genid="27" ref="-1" time="05:38:33" date="09/12/2007" oldid="16" color="" topic="">I am sorry for the delay</Utterance>
                <Utterance genid="28" ref="-1" time="05:38:40" date="09/12/2007" oldid="17" color="" topic="">lots of "stalls" in the download</Utterance>
                <Utterance genid="29" ref="-1" time="05:38:45" date="09/12/2007" oldid="18" color="" topic="">have no idea why</Utterance>
                <Utterance genid="30" ref="-1" time="05:38:46" date="09/12/2007" oldid="19" color="" topic="">any way</Utterance>
                <Utterance genid="31" ref="-1" time="05:38:50" date="09/12/2007" oldid="20" color="" topic="">gald to ssee you</Utterance>
                <Utterance genid="32" ref="-1" time="05:38:53" date="09/12/2007" oldid="21" color="" topic="">start?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="33">
                <Utterance genid="34" ref="-1" time="05:39:09" date="09/12/2007" oldid="22" color="" topic="">Yes, we should start.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="35">
                <Utterance genid="36" ref="-1" time="05:39:16" date="09/12/2007" oldid="23" color="" topic="">great</Utterance>
                <Utterance genid="37" ref="-1" time="05:39:21" date="09/12/2007" oldid="24" color="" topic="">nice Sunday</Utterance>
                <Utterance genid="38" ref="-1" time="05:39:23" date="09/12/2007" oldid="25" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="39">
                <Utterance genid="40" ref="-1" time="05:39:39" date="09/12/2007" oldid="26" color="" topic="">not in Berlin, it isn't...</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="41">
                <Utterance genid="42" ref="-1" time="05:39:53" date="09/12/2007" oldid="27" color="" topic="">not in Bucharest either</Utterance>
                <Utterance genid="43" ref="-1" time="05:40:00" date="09/12/2007" oldid="28" color="" topic="">at least not outside</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="44">
                <Utterance genid="45" ref="32" time="05:40:58" date="09/12/2007" oldid="29" color="" topic="">are we going to plan the discussion or are we going to free chat?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="46">
                <Utterance genid="47" ref="-1" time="05:41:10" date="09/12/2007" oldid="30" color="" topic="">free chat?</Utterance>
                <Utterance genid="48" ref="-1" time="05:41:22" date="09/12/2007" oldid="31" color="" topic="">ok, lets start</Utterance>
                <Utterance genid="49" ref="-1" time="05:41:26" date="09/12/2007" oldid="32" color="" topic="">what is the plan?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="50">
                <Utterance genid="51" ref="-1" time="05:41:41" date="09/12/2007" oldid="33" color="" topic="">Maybe we should each say a few words about our topic first.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="52">
                <Utterance genid="53" ref="49" time="05:42:18" date="09/12/2007" oldid="34" color="" topic="">if we use a plan i siggest three phases: introduction to each method, argument and conclusions</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="54">
                <Utterance genid="55" ref="49" time="05:43:12" date="09/12/2007" oldid="35" color="" topic="">jsut a test for references</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="56">
                <Utterance genid="57" ref="55" time="05:43:38" date="09/12/2007" oldid="36" color="" topic="">with esc you can unset a reference</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="58">
                <Utterance genid="59" ref="-1" time="05:44:05" date="09/12/2007" oldid="37" color="" topic="">aa, true</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="60">
                <Utterance genid="61" ref="-1" time="05:44:07" date="09/12/2007" oldid="38" color="" topic="">I was wondering about that</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="62">
                <Utterance genid="63" ref="55" time="05:45:10" date="09/12/2007" oldid="39" color="" topic="">i think that we should use references as much as possible</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="64">
                <Utterance genid="65" ref="63" time="05:45:32" date="09/12/2007" oldid="40" color="" topic="">That's a good ideea.</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="66">
                <Utterance genid="67" ref="53" time="05:45:51" date="09/12/2007" oldid="41" color="" topic="">I will describe the "Principle of maximum entropy for Text Classification", this will introduce you to one method for text classifications.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="68">
                <Utterance genid="69" ref="67" time="05:46:32" date="09/12/2007" oldid="42" color="" topic="">You shoud go ahead and start...</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="70">
                <Utterance genid="71" ref="69" time="05:47:30" date="09/12/2007" oldid="43" color="" topic="">i understand we need the introductions for each method</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="72">
                <Utterance genid="73" ref="-1" time="05:48:09" date="09/12/2007" oldid="44" color="" topic="">OK. I will talk about the Naive Bayes classifier.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="74">
                <Utterance genid="75" ref="73" time="05:48:57" date="09/12/2007" oldid="45" color="" topic="">i shall introduce the support vector machines</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="76">
                <Utterance genid="77" ref="75" time="05:49:29" date="09/12/2007" oldid="46" color="" topic="">Hidden Markow Models (HMM)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="78">
                <Utterance genid="79" ref="-1" time="05:50:15" date="09/12/2007" oldid="47" color="" topic="">who's starting ?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="80">
                <Utterance genid="81" ref="-1" time="05:50:36" date="09/12/2007" oldid="48" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="82">
                <Utterance genid="83" ref="79" time="05:51:42" date="09/12/2007" oldid="49" color="" topic="">then, can i start ?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="84">
                <Utterance genid="85" ref="83" time="05:51:59" date="09/12/2007" oldid="50" color="" topic="">OK</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="86">
                <Utterance genid="87" ref="-1" time="05:52:54" date="09/12/2007" oldid="51" color="" topic="">so, support vector machine (svm) tries to find a hyperplane that separates the training set into distinct categories</Utterance>
                <Utterance genid="88" ref="87" time="05:54:18" date="09/12/2007" oldid="52" color="" topic="">in its simplest form, with only two categories (pozitive and negative examples), it tries to separate the points of the vector space so that one category (positive) are on a part of the hyperplane, while the others (negatrive examples) are on the other part</Utterance>
                <Utterance genid="89" ref="88" time="05:55:19" date="09/12/2007" oldid="53" color="" topic="">moreover, the hyperplane is computed such that the distance from any training point to it to be maximized</Utterance>
                <Utterance genid="90" ref="89" time="05:56:01" date="09/12/2007" oldid="54" color="" topic="">of course, most of the times it is difficult to find a hyperplane to fully separate the two categories</Utterance>
                <Utterance genid="91" ref="90" time="05:56:43" date="09/12/2007" oldid="55" color="" topic="">therefore, the method uses penalties for each point that is classified on the wrong side of this plane</Utterance>
                <Utterance genid="92" ref="-1" time="05:57:09" date="09/12/2007" oldid="56" color="" topic="">this is the main ideea for svm</Utterance>
                <Utterance genid="93" ref="92" time="05:58:04" date="09/12/2007" oldid="57" color="" topic="">training a svm means finding the coefficients that define this hyperplane and this is a quadratic optimization problem</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="94">
                <Utterance genid="95" ref="93" time="05:58:15" date="09/12/2007" oldid="58" color="" topic="">The training data is comprised of points in a multidimensional space?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="96">
                <Utterance genid="97" ref="95" time="05:58:46" date="09/12/2007" oldid="59" color="" topic="">yes, each text document (for texts) is seen as a vector in the word-space model</Utterance>
                <Utterance genid="98" ref="97" time="05:59:19" date="09/12/2007" oldid="60" color="" topic="">or as a point (the vector is from the origin to that point)</Utterance>
                <Utterance genid="99" ref="97" time="06:00:31" date="09/12/2007" oldid="61" color="" topic="">sometimes, other techniuqes can be used to reduce the size of the vector space: like feature selection of the most important features</Utterance>
                <Utterance genid="100" ref="93" time="06:03:17" date="09/12/2007" oldid="62" color="" topic="">the training phase ussualy takes O(n pow 2) , but it can be reduced to linear time , where n is the number of text documents from the training set</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="101">
                <Utterance genid="102" ref="100" time="06:03:59" date="09/12/2007" oldid="63" color="" topic="">I was just about to ask you about the complexity...</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="103">
                <Utterance genid="104" ref="102" time="06:05:10" date="09/12/2007" oldid="64" color="" topic="">the classification is very fast, as all we have to do is to see on each side of the hyperplane the new document is placed</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="105">
                <Utterance genid="106" ref="-1" time="06:05:36" date="09/12/2007" oldid="65" color="" topic="">I see.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="107">
                <Utterance genid="108" ref="-1" time="06:06:30" date="09/12/2007" oldid="66" color="" topic="">its accuracy is about 80% or above for a lot of text classification tasks, like dmoz (open directory) classification, news classification on topic, etc.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="109">
                <Utterance genid="110" ref="108" time="06:07:25" date="09/12/2007" oldid="67" color="" topic="">greately outperforms HMM at signature recognition</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="111">
                <Utterance genid="112" ref="108" time="06:07:59" date="09/12/2007" oldid="68" color="" topic="">in a microsoft research study done on a set of news from reuters, it is considered the best classifier, with permormance close to 90%</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="113">
                <Utterance genid="114" ref="110" time="06:08:14" date="09/12/2007" oldid="69" color="" topic="">You should tell us about HMM, (#user1#).</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="115">
                <Utterance genid="116" ref="112" time="06:08:36" date="09/12/2007" oldid="70" color="" topic="">regardless of the measure of the vector space (like number of occurances or binary (for appearance or not of a term))</Utterance>
                <Utterance genid="117" ref="114" time="06:08:58" date="09/12/2007" oldid="71" color="" topic="">yes, i agree; lets see the other methods</Utterance>
                <Utterance genid="118" ref="117" time="06:11:12" date="09/12/2007" oldid="72" color="" topic="">em , hmm or bayes ?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="119">
                <Utterance genid="120" ref="-1" time="06:11:37" date="09/12/2007" oldid="73" color="" topic="">ok then... ME since you mentioned it first</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="121">
                <Utterance genid="122" ref="120" time="06:11:53" date="09/12/2007" oldid="74" color="" topic="">OK</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="123">
                <Utterance genid="124" ref="-1" time="06:12:34" date="09/12/2007" oldid="75" color="" topic="">the principle of maximum entropy is a method for analyzing the available information in order to determina a unique epistemic probability distribution.</Utterance>
                <Utterance genid="125" ref="-1" time="06:13:06" date="09/12/2007" oldid="76" color="" topic="">it can be used to estimate input probabilities more generally.</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="127">
                <Utterance genid="128" ref="-1" time="06:13:37" date="09/12/2007" oldid="78" color="" topic="">It provides a reasonable way of estimating probability distributions from the training data.</Utterance>
                <Utterance genid="129" ref="-1" time="06:14:41" date="09/12/2007" oldid="79" color="" topic="">the key principle is that when nothing is known about certain features, the distribution for them should be as uniform as possible ( this explains the term "maximum entropy")</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="130">
                <Utterance genid="131" ref="124" time="06:15:08" date="09/12/2007" oldid="80" color="" topic="">what is epistemic ?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="132">
                <Utterance genid="133" ref="-1" time="06:15:11" date="09/12/2007" oldid="81" color="" topic="">this makes ME a very simple and fast method</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="134">
                <Utterance genid="135" ref="131" time="06:16:02" date="09/12/2007" oldid="82" color="" topic="">what does it mean ?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="136">
                <Utterance genid="137" ref="-1" time="06:16:17" date="09/12/2007" oldid="83" color="" topic="">Epistemic probability is an interpretation of the probability calculus which holds that the concenpt of probability can be defined as the degree to which a person believes that a statement is true.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="138">
                <Utterance genid="139" ref="137" time="06:17:15" date="09/12/2007" oldid="84" color="" topic="">i understand</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="140">
                <Utterance genid="141" ref="-1" time="06:17:40" date="09/12/2007" oldid="85" color="" topic="">NE has applications in many domains, and was discovered first in statistical physics, but it is now widely used for a variety of natural language tasks:</Utterance>
                <Utterance genid="142" ref="-1" time="06:18:12" date="09/12/2007" oldid="86" color="" topic="">language modelling , part-of-speach tagging, text segmentation, etc</Utterance>
                <Utterance genid="143" ref="-1" time="06:18:28" date="09/12/2007" oldid="87" color="" topic="">NE should be ME, sorry for the typo.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="144">
                <Utterance genid="145" ref="141" time="06:18:36" date="09/12/2007" oldid="88" color="" topic="">How about performance?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="147">
                <Utterance genid="148" ref="145" time="06:19:22" date="09/12/2007" oldid="90" color="" topic="">in general, it is a very fast method, but it depends a lot on the data sets</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="149">
                <Utterance genid="150" ref="141" time="06:19:32" date="09/12/2007" oldid="91" color="" topic="">does it use hill climbing to find the maximum likehood ?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="151">
                <Utterance genid="152" ref="-1" time="06:19:47" date="09/12/2007" oldid="92" color="" topic="">hy again, I am soryy, it threw me out, back (and silent for now )</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="153">
                <Utterance genid="154" ref="150" time="06:21:23" date="09/12/2007" oldid="93" color="" topic="">no, it does not use it</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="155">
                <Utterance genid="156" ref="154" time="06:22:47" date="09/12/2007" oldid="94" color="" topic="">ok; don't know why i remembered that the maximization phase means finding the local optimum using hill climbing like tehnicques</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="157">
                <Utterance genid="158" ref="150" time="06:25:26" date="09/12/2007" oldid="95" color="" topic="">ME is only useful when applied to testable information (information is testable if it can be determined whether a given distribution is consistent with it)</Utterance>
                <Utterance genid="159" ref="-1" time="06:26:23" date="09/12/2007" oldid="96" color="" topic="">An example for the statement can be: "The expectation of tha variable x is 1.62"</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="160">
                <Utterance genid="161" ref="158" time="06:27:29" date="09/12/2007" oldid="97" color="" topic="">i understand</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="162">
                <Utterance genid="163" ref="-1" time="06:27:45" date="09/12/2007" oldid="98" color="" topic="">In this case, for that statement, ME procedure consists of seeking the distribution which maximizes information entropy.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="164">
                <Utterance genid="165" ref="163" time="06:28:18" date="09/12/2007" oldid="99" color="" topic="">here i thought it uses something similar to hill climbing</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="166">
                <Utterance genid="167" ref="-1" time="06:28:41" date="09/12/2007" oldid="100" color="" topic="">(#user3#), if you don't have anything more to add, I'd like to begin my short presentation</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="168">
                <Utterance genid="169" ref="167" time="06:29:12" date="09/12/2007" oldid="101" color="" topic="">i ahve a question regardint its parameters: accuracy, ...</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="170">
                <Utterance genid="171" ref="-1" time="06:29:38" date="09/12/2007" oldid="102" color="" topic="">(#user0#), it has been shown that ME and Bayes' Rule are completely compatible... so, please introduce that to us</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="172">
                <Utterance genid="173" ref="171" time="06:30:30" date="09/12/2007" oldid="103" color="" topic="">Maybe you should answer (#user2#) question first...</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="174">
                <Utterance genid="175" ref="173" time="06:33:08" date="09/12/2007" oldid="104" color="" topic="">guess it's somewhere about 70%; or maybe i'm wrong; cause it depends a lot where it is used</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="176">
                <Utterance genid="177" ref="-1" time="06:33:39" date="09/12/2007" oldid="105" color="" topic="">http://www.speech.sri.com/papers/eurospeech2005-df-detect.ps.gz</Utterance>
                <Utterance genid="178" ref="-1" time="06:33:58" date="09/12/2007" oldid="106" color="" topic="">it is about 55 % or 43 %, depends</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="179">
                <Utterance genid="180" ref="169" time="06:34:04" date="09/12/2007" oldid="107" color="" topic="">yes, A data set, used in a research done at Carnegie Mellon, ME reduces classification error by more than 40% compared to naive Bayes. But on other data sets, basic maximum entropy does not perform as well as Naive Bayes</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="181">
                <Utterance genid="182" ref="-1" time="06:34:23" date="09/12/2007" oldid="108" color="" topic="">these are for specch recognition for telephone conversations and brodcast news</Utterance>
                <Utterance genid="183" ref="-1" time="06:34:25" date="09/12/2007" oldid="109" color="" topic="">(error rates)</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="184">
                <Utterance genid="185" ref="178" time="06:35:26" date="09/12/2007" oldid="110" color="" topic="">yes, it varies, you are right, (#user1#)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="186">
                <Utterance genid="187" ref="185" time="06:36:06" date="09/12/2007" oldid="111" color="" topic="">so it can be used for boosting performances to the bayes approach ?</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="188">
                <Utterance genid="189" ref="187" time="06:38:03" date="09/12/2007" oldid="112" color="" topic="">yes, compared to Bayes, it is more performant, on some data sets</Utterance>
                <Utterance genid="190" ref="-1" time="06:39:13" date="09/12/2007" oldid="113" color="" topic="">so, what about Bayes, (#user0#)?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="191">
                <Utterance genid="192" ref="-1" time="06:39:36" date="09/12/2007" oldid="114" color="" topic="">Well, the naive Bayesian classifier is very popular because of it's speed and ease of use.</Utterance>
                <Utterance genid="193" ref="-1" time="06:40:15" date="09/12/2007" oldid="115" color="" topic="">I will use semantics to illustrate the examples. Supposed we want to determine the meaning of a word in a sentence.</Utterance>
                <Utterance genid="194" ref="-1" time="06:41:01" date="09/12/2007" oldid="116" color="" topic="">The meaning is dependent on the sorrounding words.</Utterance>
                <Utterance genid="195" ref="-1" time="06:41:47" date="09/12/2007" oldid="117" color="" topic="">Using the Bayes theorem, we can reverse this dependency.</Utterance>
                <Utterance genid="196" ref="-1" time="06:42:33" date="09/12/2007" oldid="118" color="" topic="">So, we are now talking of the probability of the other words appearing next to the word we are analyzing.</Utterance>
                <Utterance genid="197" ref="-1" time="06:43:36" date="09/12/2007" oldid="119" color="" topic="">We now make a naive hypothesis: there is no link between the words, the probabilities are completely independent.</Utterance>
                <Utterance genid="198" ref="-1" time="06:45:27" date="09/12/2007" oldid="120" color="" topic="">Taking this simplification into consideration, we now only have to multiply a set of probabilities that we already know from the training data. We can do this in linear time, depending on the number of surrounding words we take into consideration.</Utterance>
                <Utterance genid="199" ref="-1" time="06:47:41" date="09/12/2007" oldid="121" color="" topic="">Given the overly simplifying assumption about the independence of the words, the results (#user3#) and (#user1#) mentioned are actualy very good.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="200">
                <Utterance genid="201" ref="198" time="06:47:54" date="09/12/2007" oldid="122" color="" topic="">so the training phase consists of computing some probabilities ?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="202">
                <Utterance genid="203" ref="-1" time="06:48:46" date="09/12/2007" oldid="123" color="" topic="">Yes, it is very simple, it only involves counting the appearances of words.</Utterance>
                <Utterance genid="204" ref="77" time="06:50:45" date="09/12/2007" oldid="124" color="" topic="">What about HMM, (#user1#)?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="205">
                <Utterance genid="206" ref="-1" time="06:51:01" date="09/12/2007" oldid="125" color="" topic="">ok</Utterance>
                <Utterance genid="207" ref="204" time="06:52:11" date="09/12/2007" oldid="126" color="" topic="">lots about them</Utterance>
                <Utterance genid="208" ref="-1" time="06:52:11" date="09/12/2007" oldid="127" color="" topic="">but first</Utterance>
                <Utterance genid="209" ref="-1" time="06:52:11" date="09/12/2007" oldid="128" color="" topic="">(#user2#)</Utterance>
                <Utterance genid="210" ref="-1" time="06:52:12" date="09/12/2007" oldid="129" color="" topic="">please buzz me on Y mess if my meseger do not appear fvery oftern</Utterance>
                <Utterance genid="211" ref="-1" time="06:52:12" date="09/12/2007" oldid="130" color="" topic="">(because I do not see what I have written)</Utterance>
                <Utterance genid="212" ref="-1" time="06:52:12" date="09/12/2007" oldid="131" color="" topic="">do you see this? (waht I am typoing)</Utterance>
                <Utterance genid="213" ref="-1" time="06:52:12" date="09/12/2007" oldid="132" color="" topic="">?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="214">
                <Utterance genid="215" ref="212" time="06:52:35" date="09/12/2007" oldid="133" color="" topic="">yes, it's finally here...</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="216">
                <Utterance genid="217" ref="-1" time="06:52:41" date="09/12/2007" oldid="134" color="" topic="">ok, it seems I am writing in burst, but I will write like this, and see later :)</Utterance>
                <Utterance genid="218" ref="-1" time="06:53:19" date="09/12/2007" oldid="135" color="" topic="">seo, second, when I got logged out., I was just sending this uinteresting URL about SVM and HMM</Utterance>
                <Utterance genid="219" ref="-1" time="06:53:20" date="09/12/2007" oldid="136" color="" topic="">http://www.ppgia.pucpr.br/papers/Justino_PRL_2005.pdf</Utterance>
                <Utterance genid="220" ref="-1" time="06:53:39" date="09/12/2007" oldid="137" color="" topic="">in which SVM oblityerates HMM at human segnature srecognitinon</Utterance>
                <Utterance genid="221" ref="-1" time="06:53:47" date="09/12/2007" oldid="138" color="" topic="">ok, an dinallyt, about HMM</Utterance>
                <Utterance genid="222" ref="-1" time="06:54:06" date="09/12/2007" oldid="139" color="" topic="">Morkov chasenes (without "hideden")</Utterance>
                <Utterance genid="223" ref="-1" time="06:54:11" date="09/12/2007" oldid="140" color="" topic="">are finite automata</Utterance>
                <Utterance genid="224" ref="-1" time="06:54:24" date="09/12/2007" oldid="141" color="" topic="">with prebabilities as treier links</Utterance>
                <Utterance genid="225" ref="-1" time="06:54:28" date="09/12/2007" oldid="142" color="" topic="">so</Utterance>
                <Utterance genid="226" ref="-1" time="06:55:04" date="09/12/2007" oldid="143" color="" topic="">so if we have VERB and SUBST, there is thre probability of 70% of going from VERB to SUBST</Utterance>
                <Utterance genid="227" ref="-1" time="06:55:13" date="09/12/2007" oldid="144" color="" topic="">and 30 from VERB to other tings</Utterance>
                <Utterance genid="228" ref="-1" time="06:55:30" date="09/12/2007" oldid="145" color="" topic="">the ideea is theat you can compute that probability of as chanin straeightforward</Utterance>
                <Utterance genid="229" ref="-1" time="06:55:36" date="09/12/2007" oldid="146" color="" topic="">jsut mutlipling probabiolities</Utterance>
                <Utterance genid="230" ref="-1" time="06:55:53" date="09/12/2007" oldid="147" color="" topic="">Now the Hiodden Markow chaes</Utterance>
                <Utterance genid="231" ref="-1" time="06:55:53" date="09/12/2007" oldid="148" color="" topic="">have hidden states</Utterance>
                <Utterance genid="232" ref="-1" time="06:56:00" date="09/12/2007" oldid="149" color="" topic="">like for examle (the example form The mMartin boodk)(</Utterance>
                <Utterance genid="233" ref="-1" time="06:56:21" date="09/12/2007" oldid="150" color="" topic="">iit there are certain probabilittie s of swcwirttchi ng form cold to how weather</Utterance>
                <Utterance genid="234" ref="-1" time="06:56:28" date="09/12/2007" oldid="151" color="" topic="">these are "shown" probabilities.</Utterance>
                <Utterance genid="235" ref="-1" time="06:56:42" date="09/12/2007" oldid="152" color="" topic="">however, there is some probability that you drink juice on cold weather</Utterance>
                <Utterance genid="236" ref="-1" time="06:56:55" date="09/12/2007" oldid="153" color="" topic="">and another porboability ot drink jucie on hot weatrer</Utterance>
                <Utterance genid="237" ref="-1" time="06:56:59" date="09/12/2007" oldid="154" color="" topic="">so the quaesito is</Utterance>
                <Utterance genid="238" ref="-1" time="06:57:11" date="09/12/2007" oldid="155" color="" topic="">someone is drinking juice, is not drinking ans so on</Utterance>
                <Utterance genid="239" ref="-1" time="06:57:18" date="09/12/2007" oldid="156" color="" topic="">what is the weather likle?</Utterance>
                <Utterance genid="240" ref="-1" time="06:57:32" date="09/12/2007" oldid="157" color="" topic="">so they are "?hidden" because you just know what you can observe</Utterance>
                <Utterance genid="241" ref="-1" time="06:57:37" date="09/12/2007" oldid="158" color="" topic="">the jucices drunk</Utterance>
                <Utterance genid="242" ref="-1" time="06:57:43" date="09/12/2007" oldid="159" color="" topic="">but you do not know the wether</Utterance>
                <Utterance genid="243" ref="-1" time="06:57:48" date="09/12/2007" oldid="160" color="" topic="">withc you hjave yto fguess</Utterance>
                <Utterance genid="244" ref="-1" time="06:57:56" date="09/12/2007" oldid="161" color="" topic="">not more practically</Utterance>
                <Utterance genid="245" ref="-1" time="06:58:02" date="09/12/2007" oldid="162" color="" topic="">and HMM are good at this</Utterance>
                <Utterance genid="246" ref="-1" time="06:58:36" date="09/12/2007" oldid="163" color="" topic="">if lets say in an office you can count the nubmer of keeyboard yticks, and the mouse moves and the sound in the roon</Utterance>
                <Utterance genid="247" ref="-1" time="06:58:42" date="09/12/2007" oldid="164" color="" topic="">and some other actions</Utterance>
                <Utterance genid="248" ref="-1" time="06:58:52" date="09/12/2007" oldid="165" color="" topic="">you want to know what your enployees are doing</Utterance>
                <Utterance genid="249" ref="-1" time="06:58:57" date="09/12/2007" oldid="166" color="" topic="">(ie slaving fro you or not)</Utterance>
                <Utterance genid="250" ref="-1" time="06:59:13" date="09/12/2007" oldid="167" color="" topic="">so the HMM deduces what your ewnoployees are doing</Utterance>
                <Utterance genid="251" ref="-1" time="06:59:17" date="09/12/2007" oldid="168" color="" topic="">based on the observations</Utterance>
                <Utterance genid="252" ref="-1" time="06:59:21" date="09/12/2007" oldid="169" color="" topic="">from ythe sendosrs</Utterance>
                <Utterance genid="253" ref="-1" time="06:59:26" date="09/12/2007" oldid="170" color="" topic=""/>
                <Utterance genid="254" ref="-1" time="06:59:29" date="09/12/2007" oldid="171" color="" topic="">jsut oa mont</Utterance>
                <Utterance genid="255" ref="-1" time="06:59:33" date="09/12/2007" oldid="172" color="" topic="">very good example</Utterance>
                <Utterance genid="256" ref="-1" time="06:59:57" date="09/12/2007" oldid="173" color="" topic="">http://research.microsoft.com/~horvitz/DBN_HMM.pdf</Utterance>
                <Utterance genid="257" ref="-1" time="07:00:00" date="09/12/2007" oldid="174" color="" topic="">thsi</Utterance>
                <Utterance genid="258" ref="-1" time="07:00:16" date="09/12/2007" oldid="175" color="" topic="">it deduyce exactely what I have said before</Utterance>
                <Utterance genid="259" ref="-1" time="07:00:32" date="09/12/2007" oldid="176" color="" topic="">but it is much cheeper than a sort of Bayes</Utterance>
                <Utterance genid="260" ref="-1" time="07:00:46" date="09/12/2007" oldid="177" color="" topic="">DBN</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="261">
                <Utterance genid="262" ref="259" time="07:00:46" date="09/12/2007" oldid="178" color="" topic="">It sounds very nice, but is it accurate?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="263">
                <Utterance genid="264" ref="-1" time="07:00:55" date="09/12/2007" oldid="179" color="" topic="">depends on the application</Utterance>
                <Utterance genid="265" ref="-1" time="07:01:00" date="09/12/2007" oldid="180" color="" topic="">for this example</Utterance>
                <Utterance genid="266" ref="-1" time="07:01:18" date="09/12/2007" oldid="181" color="" topic="">for some actions is more curate taht DBN ((#user0#), is thsi related to Bayes Naiver)</Utterance>
                <Utterance genid="267" ref="-1" time="07:01:25" date="09/12/2007" oldid="182" color="" topic="">and fofr others more imprecise</Utterance>
                <Utterance genid="268" ref="-1" time="07:01:27" date="09/12/2007" oldid="183" color="" topic="">however</Utterance>
                <Utterance genid="269" ref="-1" time="07:01:36" date="09/12/2007" oldid="184" color="" topic="">9it is much cheeper all the time</Utterance>
                <Utterance genid="270" ref="-1" time="07:01:39" date="09/12/2007" oldid="185" color="" topic="">like for example</Utterance>
                <Utterance genid="271" ref="-1" time="07:01:48" date="09/12/2007" oldid="186" color="" topic="">3 % vs 98 % sensor utilization</Utterance>
                <Utterance genid="272" ref="-1" time="07:01:49" date="09/12/2007" oldid="187" color="" topic=""/>
            </Turn>
            <Turn nickname="(#user2#)" genid="273">
                <Utterance genid="274" ref="266" time="07:02:07" date="09/12/2007" oldid="188" color="" topic="">i guess it's bayesian networks</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="275">
                <Utterance genid="276" ref="271" time="07:02:07" date="09/12/2007" oldid="189" color="" topic="">That's quite a difference!</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="277">
                <Utterance genid="278" ref="-1" time="07:02:08" date="09/12/2007" oldid="190" color="" topic="">aa</Utterance>
                <Utterance genid="279" ref="-1" time="07:02:15" date="09/12/2007" oldid="191" color="" topic="">and also about SVM (wn I got logged out)</Utterance>
                <Utterance genid="280" ref="-1" time="07:02:23" date="09/12/2007" oldid="192" color="" topic="">http://ieeexplore.ieee.org/Xplore/login.jsp?url=/iel5/8535/27072/01202331.pdf?arnumber=1202331</Utterance>
                <Utterance genid="281" ref="-1" time="07:02:26" date="09/12/2007" oldid="193" color="" topic="">thius</Utterance>
                <Utterance genid="282" ref="-1" time="07:02:50" date="09/12/2007" oldid="194" color="" topic="">tells that SVM is pooere that SVM used together with HMM</Utterance>
                <Utterance genid="283" ref="-1" time="07:03:03" date="09/12/2007" oldid="195" color="" topic="">I do not knwo exactely hwo they work tohgether, because I do not have acces to the whole article,</Utterance>
                <Utterance genid="284" ref="-1" time="07:03:06" date="09/12/2007" oldid="196" color="" topic="">but I trust tehm</Utterance>
                <Utterance genid="285" ref="-1" time="07:03:07" date="09/12/2007" oldid="197" color="" topic="">:)</Utterance>
                <Utterance genid="286" ref="276" time="07:03:28" date="09/12/2007" oldid="198" color="" topic="">aa, yes, but not for all, it is just an extreme exemple</Utterance>
                <Utterance genid="287" ref="-1" time="07:03:44" date="09/12/2007" oldid="199" color="" topic="">in any case</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="288">
                <Utterance genid="289" ref="282" time="07:03:46" date="09/12/2007" oldid="200" color="" topic="">Of course, hybrid algorithms are always better.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="290">
                <Utterance genid="291" ref="-1" time="07:03:53" date="09/12/2007" oldid="201" color="" topic="">youy can compare (last wtow pages, very good)</Utterance>
                <Utterance genid="292" ref="-1" time="07:04:13" date="09/12/2007" oldid="202" color="" topic="">yes, probably</Utterance>
                <Utterance genid="293" ref="-1" time="07:04:29" date="09/12/2007" oldid="203" color="" topic="">so, that youd be it</Utterance>
                <Utterance genid="294" ref="-1" time="07:04:47" date="09/12/2007" oldid="204" color="" topic="">as I said, these links are good for comparison, and sas for the theretical algoritms</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="295">
                <Utterance genid="296" ref="289" time="07:04:50" date="09/12/2007" oldid="205" color="" topic="">depending on the task</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="297">
                <Utterance genid="298" ref="-1" time="07:04:57" date="09/12/2007" oldid="206" color="" topic="">there are more plebles that one can askl</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="299">
                <Utterance genid="300" ref="-1" time="07:05:02" date="09/12/2007" oldid="207" color="" topic="">I'se seen a paper describing an improved native Bayes algorithm achieving about 83% accuracy. http://www.math.upatras.gr/~esdlab/en/members/kotsiantis/05_Kotsiantis-Logitboost%20of%20simble%20bayesian..._No%205.pdf</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="301">
                <Utterance genid="302" ref="296" time="07:05:10" date="09/12/2007" oldid="208" color="" topic="">and on the time you have for solving that task</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="303">
                <Utterance genid="304" ref="296" time="07:05:17" date="09/12/2007" oldid="209" color="" topic="">yes, about tasks, one interesting task is in echonomics</Utterance>
                <Utterance genid="305" ref="304" time="07:05:30" date="09/12/2007" oldid="210" color="" topic="">http://imaman.oxfordjournals.org/cgi/reprint/15/1/13.pdf</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="306">
                <Utterance genid="307" ref="302" time="07:05:32" date="09/12/2007" oldid="211" color="" topic="">Yes, performance is also a factor.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="308">
                <Utterance genid="309" ref="300" time="07:05:36" date="09/12/2007" oldid="212" color="" topic="">the impoved bayes maethod uses bayesian networks as i recall</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="310">
                <Utterance genid="311" ref="-1" time="07:05:52" date="09/12/2007" oldid="213" color="" topic="">here they deduce what customes are loyal and which are not</Utterance>
                <Utterance genid="312" ref="-1" time="07:05:53" date="09/12/2007" oldid="214" color="" topic="">considering their actions</Utterance>
                <Utterance genid="313" ref="-1" time="07:05:56" date="09/12/2007" oldid="215" color="" topic="">it is just like in the cold/hot weatrher example</Utterance>
                <Utterance genid="314" ref="-1" time="07:06:05" date="09/12/2007" oldid="216" color="" topic="">based on thwat those cuswtomesr s are doin</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="315">
                <Utterance genid="316" ref="309" time="07:06:33" date="09/12/2007" oldid="217" color="" topic="">I'm not sure... However, it seems clear that a method combining different approaches is more likely to succed.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="317">
                <Utterance genid="318" ref="309" time="07:06:35" date="09/12/2007" oldid="218" color="" topic="">and it is better because it does not use the naive principle; which is great for complexity and speed, but not so good for accuracy</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="319">
                <Utterance genid="320" ref="-1" time="07:06:38" date="09/12/2007" oldid="219" color="" topic="">you cocnlude if they are loyal or not (and take appropriate actions, like offering mmore dservices for the loyal ones and more disounts for the displo0yal ones --echinoimy stuff)</Utterance>
                <Utterance genid="321" ref="318" time="07:07:16" date="09/12/2007" oldid="220" color="" topic="">about thsi acuracy thing</Utterance>
                <Utterance genid="322" ref="321" time="07:07:21" date="09/12/2007" oldid="221" color="" topic="">http://matwbn.icm.edu.pl/ksiazki/amc/amc15/amc15211.pdf</Utterance>
                <Utterance genid="323" ref="322" time="07:07:36" date="09/12/2007" oldid="222" color="" topic="">says that dthe difference in the error is between 10% and 30%</Utterance>
                <Utterance genid="324" ref="-1" time="07:07:54" date="09/12/2007" oldid="223" color="" topic="">and the difference in the assumptions made (aska very naive indeed or bvery acurate is enormous)</Utterance>
                <Utterance genid="325" ref="324" time="07:08:35" date="09/12/2007" oldid="224" color="" topic="">so, 10-30% semes to be decent, considering the ipothesis (I say seems becauseit is their optinion, I do not hete the right authotity to say this :D:D:)</Utterance>
                <Utterance genid="326" ref="325" time="07:09:03" date="09/12/2007" oldid="225" color="" topic="">(its about speech recognition)</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="327">
                <Utterance genid="328" ref="324" time="07:09:05" date="09/12/2007" oldid="226" color="" topic="">That is a surprising result, but I've seen a paper explaining why this happens. http://www.cs.unb.ca/profs/hzhang/publications/FLAIRS04ZhangH.pdf</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="329">
                <Utterance genid="330" ref="-1" time="07:09:15" date="09/12/2007" oldid="227" color="" topic="">aa</Utterance>
                <Utterance genid="331" ref="328" time="07:09:21" date="09/12/2007" oldid="228" color="" topic="">and why?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="332">
                <Utterance genid="333" ref="-1" time="07:11:09" date="09/12/2007" oldid="229" color="" topic="">I didn't really follow the demonstration, I've just noted that someone explained the apparently unreasonable result.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="334">
                <Utterance genid="335" ref="328" time="07:11:21" date="09/12/2007" oldid="230" color="" topic="">this paper cannot be understood at a glance</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="336">
                <Utterance genid="337" ref="335" time="07:11:49" date="09/12/2007" oldid="231" color="" topic="">That's true.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="338">
                <Utterance genid="339" ref="335" time="07:12:01" date="09/12/2007" oldid="232" color="" topic="">probably the case for any significat paper</Utterance>
                <Utterance genid="340" ref="-1" time="07:12:11" date="09/12/2007" oldid="233" color="" topic="">oh</Utterance>
                <Utterance genid="341" ref="-1" time="07:12:16" date="09/12/2007" oldid="234" color="" topic="">and (#user2#), I remembered</Utterance>
                <Utterance genid="342" ref="-1" time="07:12:20" date="09/12/2007" oldid="235" color="" topic="">i nthe same paper:</Utterance>
                <Utterance genid="343" ref="-1" time="07:12:28" date="09/12/2007" oldid="236" color="" topic="">http://www.lsi.upc.es/~lluism/masterIA/curs06-07/pract/Chunking.ppt</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="344">
                <Utterance genid="345" ref="325" time="07:12:34" date="09/12/2007" oldid="237" color="" topic="">speech recognition is quite a distinct problem than text classification; it is significatively harder</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="346">
                <Utterance genid="347" ref="343" time="07:12:35" date="09/12/2007" oldid="238" color="" topic="">(its actuyllay another )</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="348">
                <Utterance genid="349" ref="345" time="07:12:45" date="09/12/2007" oldid="239" color="" topic="">and requires different methods</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="350">
                <Utterance genid="351" ref="-1" time="07:12:48" date="09/12/2007" oldid="240" color="" topic="">they say taht SVM outperforms HMM i ngeneral</Utterance>
                <Utterance genid="352" ref="-1" time="07:13:10" date="09/12/2007" oldid="241" color="" topic="">but when using elxical specailization , HMM is much faster</Utterance>
                <Utterance genid="353" ref="-1" time="07:13:24" date="09/12/2007" oldid="242" color="" topic="">so... jsut as an comaprison</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="354">
                <Utterance genid="355" ref="352" time="07:13:53" date="09/12/2007" oldid="243" color="" topic="">i think that all the methods have advantages and disadvantages; and, more, they are usuful for distinct tasks</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="356">
                <Utterance genid="357" ref="355" time="07:14:19" date="09/12/2007" oldid="244" color="" topic="">yes, obviouly , jsut talikning about comparisons</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="358">
                <Utterance genid="359" ref="355" time="07:14:23" date="09/12/2007" oldid="245" color="" topic="">Clearly...</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="360">
                <Utterance genid="361" ref="352" time="07:14:43" date="09/12/2007" oldid="246" color="" topic="">hmm is very good for tagging, because svms cannot be used there</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="362">
                <Utterance genid="363" ref="357" time="07:14:46" date="09/12/2007" oldid="247" color="" topic="">(I have lots of links here, so jsut copyu -pasting them)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="364">
                <Utterance genid="365" ref="-1" time="07:14:49" date="09/12/2007" oldid="248" color="" topic="">we cannot define the hyperplane</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="366">
                <Utterance genid="367" ref="365" time="07:15:19" date="09/12/2007" oldid="249" color="" topic="">just o moment, I do not understand</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="368">
                <Utterance genid="369" ref="355" time="07:15:22" date="09/12/2007" oldid="250" color="" topic="">bayes is great for sense disambiguation</Utterance>
                <Utterance genid="370" ref="367" time="07:16:14" date="09/12/2007" oldid="251" color="" topic="">i'm saying that support vector machines cannot be used for tagging, because they do not use a probabilistic approach needed to do this task</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="371">
                <Utterance genid="372" ref="365" time="07:16:17" date="09/12/2007" oldid="252" color="" topic="">so SVM can not be used at ALL, or jsut not used efficiently</Utterance>
                <Utterance genid="373" ref="365" time="07:16:37" date="09/12/2007" oldid="253" color="" topic="">because from what ythey are saying it seems they are using tit...</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="374">
                <Utterance genid="375" ref="367" time="07:16:46" date="09/12/2007" oldid="254" color="" topic="">it's difficult and not efficient</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="376">
                <Utterance genid="377" ref="375" time="07:16:56" date="09/12/2007" oldid="255" color="" topic="">a, ok</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="378">
                <Utterance genid="379" ref="372" time="07:17:17" date="09/12/2007" oldid="256" color="" topic="">so they are not used for everything, neither is bayes or hmm</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="380">
                <Utterance genid="381" ref="375" time="07:17:27" date="09/12/2007" oldid="257" color="" topic="">a,m by the way of efficiency, for these HMMs, there are dynamic progaramming algorthms</Utterance>
                <Utterance genid="382" ref="381" time="07:17:35" date="09/12/2007" oldid="258" color="" topic="">==&gt;complexity</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="383">
                <Utterance genid="384" ref="381" time="07:18:31" date="09/12/2007" oldid="259" color="" topic="">they use the probabilties of pairs or even triplets of words ?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="385">
                <Utterance genid="386" ref="-1" time="07:19:58" date="09/12/2007" oldid="260" color="" topic="">I am not sure what exactely they are using, but (a very hazardous and totally unoinformed guessguess) I woul;d sayu paris (my first guess, but again totalyy without proff)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="387">
                <Utterance genid="388" ref="386" time="07:20:47" date="09/12/2007" oldid="261" color="" topic="">i think that it words on pairs and triplets</Utterance>
                <Utterance genid="389" ref="388" time="07:21:17" date="09/12/2007" oldid="262" color="" topic="">because the complexity increases very much if using more than triplets</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="390">
                <Utterance genid="391" ref="-1" time="07:21:51" date="09/12/2007" oldid="263" color="" topic="">because of all the possible compinations?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="392">
                <Utterance genid="393" ref="391" time="07:22:43" date="09/12/2007" oldid="264" color="" topic="">exactly</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="394">
                <Utterance genid="395" ref="393" time="07:23:01" date="09/12/2007" oldid="265" color="" topic="">probably so, but jsut out of curiosyty, what is the gain in acuyracy</Utterance>
                <Utterance genid="396" ref="395" time="07:23:42" date="09/12/2007" oldid="266" color="" topic="">for example, if using groups of 4 as agains tripelts, is the acucracy muych more improcved? (I guss this is a general question, neot necessarily related to HMM)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="397">
                <Utterance genid="398" ref="396" time="07:24:48" date="09/12/2007" oldid="267" color="" topic="">i know that for tagging the accuracy is great using just triplets; so the increase in complexity is just not worth it</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="399">
                <Utterance genid="400" ref="398" time="07:25:07" date="09/12/2007" oldid="268" color="" topic="">a</Utterance>
                <Utterance genid="401" ref="398" time="07:25:19" date="09/12/2007" oldid="269" color="" topic="">I have no idea babout thsi kind of comparison</Utterance>
                <Utterance genid="402" ref="-1" time="07:25:39" date="09/12/2007" oldid="270" color="" topic="">but what is the difference is acuracy/complexity gain for touples/triplets?</Utterance>
                <Utterance genid="403" ref="398" time="07:25:49" date="09/12/2007" oldid="271" color="" topic="">do you know?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="404">
                <Utterance genid="405" ref="400" time="07:25:50" date="09/12/2007" oldid="272" color="" topic="">that is the case for other techniques as well; svms can use different surfaces to separate the categories... but the process takes longer</Utterance>
                <Utterance genid="406" ref="403" time="07:28:09" date="09/12/2007" oldid="273" color="" topic="">right now, i do not have any actual data at hand</Utterance>
                <Utterance genid="407" ref="406" time="07:28:40" date="09/12/2007" oldid="274" color="" topic="">probably somewhere around 20% increase for tagging</Utterance>
                <Utterance genid="408" ref="-1" time="07:29:40" date="09/12/2007" oldid="275" color="" topic="">i guess we are close to the finish line, don't you think so ?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="409">
                <Utterance genid="410" ref="408" time="07:29:50" date="09/12/2007" oldid="276" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="411">
                <Utterance genid="412" ref="-1" time="07:29:52" date="09/12/2007" oldid="277" color="" topic="">any observations, conclusions ?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="413">
                <Utterance genid="414" ref="-1" time="07:30:06" date="09/12/2007" oldid="278" color="" topic="">O do not know, this is generally it what I have collected about HMMs</Utterance>
                <Utterance genid="415" ref="412" time="07:30:30" date="09/12/2007" oldid="279" color="" topic="">probably I can describe more the theroretical part, but hte applications seemed very interesting</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="416">
                <Utterance genid="417" ref="412" time="07:30:35" date="09/12/2007" oldid="280" color="" topic="">I think we've been reaching conclusions for some time: modified algorithms are better.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="418">
                <Utterance genid="419" ref="417" time="07:31:08" date="09/12/2007" oldid="281" color="" topic="">yes, combined tehniques can be used with increased performances</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="420">
                <Utterance genid="421" ref="-1" time="07:31:31" date="09/12/2007" oldid="282" color="" topic="">toally ooftopic: quicsort and bubblestort</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="422">
                <Utterance genid="423" ref="417" time="07:31:40" date="09/12/2007" oldid="283" color="" topic="">i'm not sure about the time consumption for this tehnicques</Utterance>
                <Utterance genid="424" ref="421" time="07:31:54" date="09/12/2007" oldid="284" color="" topic="">usually qs, depends</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="425">
                <Utterance genid="426" ref="423" time="07:32:06" date="09/12/2007" oldid="285" color="" topic="">i swould say it coul d not be worse thatn the maximum of the two</Utterance>
                <Utterance genid="427" ref="426" time="07:32:13" date="09/12/2007" oldid="286" color="" topic="">right?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="428">
                <Utterance genid="429" ref="426" time="07:33:36" date="09/12/2007" oldid="287" color="" topic="">do not know; depending how they are used; if they are used only to find the same results and then use an heuristics to decide the result, asympoticaly the time complexisty is the same</Utterance>
                <Utterance genid="430" ref="429" time="07:33:50" date="09/12/2007" oldid="288" color="" topic="">but in practice, taking twice as much time is a problem</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="431">
                <Utterance genid="432" ref="430" time="07:34:51" date="09/12/2007" oldid="289" color="" topic="">a, true, the old time vs complexity</Utterance>
                <Utterance genid="433" ref="-1" time="07:35:23" date="09/12/2007" oldid="290" color="" topic="">any way, other concluzions? ..</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="434">
                <Utterance genid="435" ref="433" time="07:36:04" date="09/12/2007" oldid="291" color="" topic="">each tehnique can be used with good results in some tasks</Utterance>
                <Utterance genid="436" ref="435" time="07:36:23" date="09/12/2007" oldid="292" color="" topic="">so, depending on the task we have to solve, we must choose the right one</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="437">
                <Utterance genid="438" ref="436" time="07:36:27" date="09/12/2007" oldid="293" color="" topic="">yes, buy the way</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="439">
                <Utterance genid="440" ref="-1" time="07:36:37" date="09/12/2007" oldid="294" color="" topic="">it was a very interesting conversation to follow, the described methodes behave better or worse depending on different data sets and, as was said before, the combination of these or with other methods provide better results.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="441">
                <Utterance genid="442" ref="436" time="07:36:59" date="09/12/2007" oldid="295" color="" topic="">It actully took me so me time to find something whare HMM is better than SVM</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="443">
                <Utterance genid="444" ref="442" time="07:37:34" date="09/12/2007" oldid="296" color="" topic="">yup, but nevertheless, hmm are better than svm for some tasks, even in nlp</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="445">
                <Utterance genid="446" ref="442" time="07:37:39" date="09/12/2007" oldid="297" color="" topic="">because SVm seems to be a newer techniwue, but i nany case, as I said in those example.s there are lots of appplicatiosn whre this is not the case</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="447">
                <Utterance genid="448" ref="446" time="07:39:22" date="09/12/2007" oldid="298" color="" topic="">yes, svm is one of the newest techniques</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="449">
                <Utterance genid="450" ref="-1" time="07:40:09" date="09/12/2007" oldid="299" color="" topic="">Well, I say we should conclude the chat.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="451">
                <Utterance genid="452" ref="-1" time="07:40:11" date="09/12/2007" oldid="300" color="" topic="">one thing we have not talked about... other tehniques used for text classification</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="453">
                <Utterance genid="454" ref="452" time="07:40:22" date="09/12/2007" oldid="301" color="" topic="">come onnn</Utterance>
                <Utterance genid="455" ref="454" time="07:40:30" date="09/12/2007" oldid="302" color="" topic="">all those links ...:)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="456">
                <Utterance genid="457" ref="452" time="07:40:54" date="09/12/2007" oldid="303" color="" topic="">here, i must add the old decision trees classifiers... one of the first algorithms</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="458">
                <Utterance genid="459" ref="457" time="07:41:09" date="09/12/2007" oldid="304" color="" topic="">there was the one for customer behavior, the other for the enp;olyees bsed on mouse and viede data</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="460">
                <Utterance genid="461" ref="459" time="07:41:47" date="09/12/2007" oldid="305" color="" topic="">these is not text classification :P :D</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="462">
                <Utterance genid="463" ref="461" time="07:42:13" date="09/12/2007" oldid="306" color="" topic="">exactely,</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="464">
                <Utterance genid="465" ref="461" time="07:42:22" date="09/12/2007" oldid="307" color="" topic="">* these are not ...</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="466">
                <Utterance genid="467" ref="-1" time="07:42:37" date="09/12/2007" oldid="308" color="" topic="">aaa, sory, may bad, I thought you said , "toher than text classification"</Utterance>
                <Utterance genid="468" ref="465" time="07:43:01" date="09/12/2007" oldid="309" color="" topic="">but the "other " referred to techniwus, not applications , ok, my bad</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="469">
                <Utterance genid="470" ref="-1" time="07:43:30" date="09/12/2007" oldid="310" color="" topic="">in my opinion we're pretty done</Utterance>
                <Utterance genid="471" ref="-1" time="07:43:40" date="09/12/2007" oldid="311" color="" topic="">i have told what i had to say</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="472">
                <Utterance genid="473" ref="470" time="07:43:49" date="09/12/2007" oldid="312" color="" topic="">I agree.</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="474">
                <Utterance genid="475" ref="-1" time="07:43:58" date="09/12/2007" oldid="313" color="" topic="">me too.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="476">
                <Utterance genid="477" ref="470" time="07:44:12" date="09/12/2007" oldid="314" color="" topic="">ok, so are trere more techniques we wknow about (probably there are, but do you tknow about them?) or</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="478">
                <Utterance genid="479" ref="477" time="07:45:29" date="09/12/2007" oldid="315" color="" topic="">i'm sure there are... but i do not know them very well</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="480">
                <Utterance genid="481" ref="470" time="07:47:00" date="09/12/2007" oldid="316" color="" topic="">k, same opinion here</Utterance>
                <Utterance genid="482" ref="470" time="07:47:03" date="09/12/2007" oldid="317" color="" topic="">same here</Utterance>
                <Utterance genid="483" ref="470" time="07:47:04" date="09/12/2007" oldid="318" color="" topic="">(this thinkg does not send the messeges)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="484">
                <Utterance genid="485" ref="482" time="07:47:56" date="09/12/2007" oldid="319" color="" topic="">ok; so it's time to conclude our chat and start some other sunday evening activity</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="486">
                <Utterance genid="487" ref="485" time="07:48:13" date="09/12/2007" oldid="320" color="" topic="">:) treu, treu</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="488">
                <Utterance genid="489" ref="485" time="07:48:16" date="09/12/2007" oldid="321" color="" topic="">like doing nothing :D or watching k1</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="490">
                <Utterance genid="491" ref="485" time="07:48:41" date="09/12/2007" oldid="322" color="" topic="">It's been nice talking to you. Bye!</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="492">
                <Utterance genid="493" ref="485" time="07:48:58" date="09/12/2007" oldid="323" color="" topic="">soo, good bye?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="494">
                <Utterance genid="495" ref="491" time="07:48:59" date="09/12/2007" oldid="324" color="" topic="">same here! have a pleasant evening!</Utterance>
                <Utterance genid="496" ref="-1" time="07:49:28" date="09/12/2007" oldid="325" color="" topic="">one last question: who is going to send the transcipt ?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="497">
                <Utterance genid="498" ref="-1" time="07:49:45" date="09/12/2007" oldid="326" color="" topic="">I'll send it.</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="499">
                <Utterance genid="500" ref="-1" time="07:50:14" date="09/12/2007" oldid="327" color="" topic="">ok, it was a great conversation</Utterance>
                <Utterance genid="501" ref="-1" time="07:50:24" date="09/12/2007" oldid="328" color="" topic="">i nice evening to all of you.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="502">
                <Utterance genid="503" ref="498" time="07:50:26" date="09/12/2007" oldid="329" color="" topic="">perfect!</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="504">
                <Utterance genid="505" ref="-1" time="07:50:38" date="09/12/2007" oldid="330" color="" topic="">*a</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="506">
                <Utterance genid="507" ref="-1" time="07:50:53" date="09/12/2007" oldid="331" color="" topic="">ciao!</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="508">
                <Utterance genid="509" ref="-1" time="07:51:01" date="09/12/2007" oldid="332" color="" topic="">sal</Utterance>
            </Turn>
            <Turn nickname="(#user3#)" genid="511">
                <Utterance genid="512" ref="-1" time="07:51:16" date="09/12/2007" oldid="334" color="" topic="">chuss! :)</Utterance>
            </Turn>
        </Body>
    </Dialog>