    <Dialog team="m4" id="" name="" date="" time="" description="" subject="">
        <Body>
            <Topics/>
            <Turn nickname="(#user0#)" genid="4">
                <Utterance genid="5" ref="-1" time="09:10:11" date="08/12/2007" oldid="4" color="" topic="">are you ready to start the conference?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="6">
                <Utterance genid="7" ref="-1" time="09:11:51" date="08/12/2007" oldid="5" color="" topic="">yes, who goes first?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="8">
                <Utterance genid="9" ref="-1" time="09:12:31" date="08/12/2007" oldid="6" color="" topic="">first, we should state our topics</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="10">
                <Utterance genid="11" ref="-1" time="09:13:00" date="08/12/2007" oldid="7" color="" topic="">ok, I'm here to talk about maximum entropy</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="12">
                <Utterance genid="13" ref="-1" time="09:13:00" date="08/12/2007" oldid="8" color="" topic="">I will talk about HMM - the Hidden Markov Model</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="14">
                <Utterance genid="15" ref="-1" time="09:13:38" date="08/12/2007" oldid="9" color="" topic="">I will support Support vector machine (SVM)</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="16">
                <Utterance genid="17" ref="-1" time="09:13:51" date="08/12/2007" oldid="10" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="18">
                <Utterance genid="19" ref="-1" time="09:14:04" date="08/12/2007" oldid="11" color="" topic="">now who wants to go first to talk describe his domain?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="20">
                <Utterance genid="21" ref="-1" time="09:14:42" date="08/12/2007" oldid="12" color="" topic="">this will be a difficult task</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="22">
                <Utterance genid="23" ref="-1" time="09:15:05" date="08/12/2007" oldid="13" color="" topic="">A SVM is an algorithm which computes the linear separation surface with maximum margin for a given training set.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="24">
                <Utterance genid="25" ref="-1" time="09:15:19" date="08/12/2007" oldid="14" color="" topic="">I believe all of us read about some algorithms</Utterance>
                <Utterance genid="26" ref="-1" time="09:15:34" date="08/12/2007" oldid="15" color="" topic="">:)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="27">
                <Utterance genid="28" ref="-1" time="09:15:55" date="08/12/2007" oldid="16" color="" topic="">Only a subset of the input vectors will influence the choice of the margin; such vectors are called support vectors.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="29">
                <Utterance genid="30" ref="-1" time="09:15:59" date="08/12/2007" oldid="17" color="" topic="">even so, we should talk about the basics first</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="31">
                <Utterance genid="32" ref="25" time="09:16:57" date="08/12/2007" oldid="18" color="" topic="">I agree</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="33">
                <Utterance genid="34" ref="-1" time="09:17:31" date="08/12/2007" oldid="19" color="" topic="">I hope you still remember the basics about linear algebra :)</Utterance>
                <Utterance genid="35" ref="-1" time="09:17:52" date="08/12/2007" oldid="20" color="" topic="">My method uses a lot of that</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="36">
                <Utterance genid="37" ref="-1" time="09:18:28" date="08/12/2007" oldid="21" color="" topic="">so does mine, but it depends on how much we should go into details about the math behind it</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="38">
                <Utterance genid="39" ref="-1" time="09:19:25" date="08/12/2007" oldid="22" color="" topic="">my method is based on probabilities, so I believe it will be easy to follow</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="40">
                <Utterance genid="41" ref="23" time="09:19:47" date="08/12/2007" oldid="23" color="" topic="">I will keep it informal</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="42">
                <Utterance genid="43" ref="39" time="09:19:52" date="08/12/2007" oldid="24" color="" topic=""/>
            </Turn>
            <Turn nickname="(#user2#)" genid="44">
                <Utterance genid="45" ref="-1" time="09:20:10" date="08/12/2007" oldid="25" color="" topic="">Basically, SVM is a machine learning technique.</Utterance>
                <Utterance genid="46" ref="-1" time="09:20:36" date="08/12/2007" oldid="26" color="" topic="">you start with a given large set of data</Utterance>
                <Utterance genid="47" ref="-1" time="09:20:59" date="08/12/2007" oldid="27" color="" topic="">and the algorithm classifies it</Utterance>
                <Utterance genid="48" ref="-1" time="09:21:15" date="08/12/2007" oldid="28" color="" topic="">SVM uses a binary classification</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="49">
                <Utterance genid="50" ref="48" time="09:21:55" date="08/12/2007" oldid="29" color="" topic="">what does it mean?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="51">
                <Utterance genid="52" ref="-1" time="09:22:37" date="08/12/2007" oldid="30" color="" topic="">For example a yes or no classification</Utterance>
                <Utterance genid="53" ref="-1" time="09:23:01" date="08/12/2007" oldid="31" color="" topic="">A white or black one</Utterance>
                <Utterance genid="54" ref="-1" time="09:23:46" date="08/12/2007" oldid="32" color="" topic="">You have two classes and you are able to decide which data belongs to which class</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="55">
                <Utterance genid="56" ref="52" time="09:24:29" date="08/12/2007" oldid="33" color="" topic="">so this method is limited to this type of problems?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="57">
                <Utterance genid="58" ref="-1" time="09:25:26" date="08/12/2007" oldid="34" color="" topic="">you have to map your problem to this constraint</Utterance>
                <Utterance genid="59" ref="-1" time="09:25:49" date="08/12/2007" oldid="35" color="" topic="">Support vector machines map input vectors to a higher dimensional space where a maximal separating hyperplane is constructed.</Utterance>
                <Utterance genid="60" ref="-1" time="09:28:42" date="08/12/2007" oldid="36" color="" topic="">Each of these data points belongs to only one of two classes.</Utterance>
                <Utterance genid="61" ref="-1" time="09:29:08" date="08/12/2007" oldid="37" color="" topic="">Each data point will be represented by a p-dimensional vector (a list of p numbers).</Utterance>
                <Utterance genid="62" ref="-1" time="09:29:29" date="08/12/2007" oldid="38" color="" topic="">We are interested in whether we can separate them with a "p minus 1" dimensional hyperplane. This is a typical form of linear classifier.</Utterance>
                <Utterance genid="63" ref="-1" time="09:29:52" date="08/12/2007" oldid="39" color="" topic="">There are many linear classifiers that might satisfy this property. However, we are additionally interested in finding out if we can achieve maximum separation (margin) between the two classes.</Utterance>
                <Utterance genid="64" ref="-1" time="09:30:38" date="08/12/2007" oldid="40" color="" topic="">We pick the hyperplane so that the distance from the hyperplane to the nearest data point is maximized.</Utterance>
                <Utterance genid="65" ref="-1" time="09:30:54" date="08/12/2007" oldid="41" color="" topic="">In our case L2</Utterance>
                <Utterance genid="66" ref="-1" time="09:31:38" date="08/12/2007" oldid="42" color="" topic="">SVM is well known to be a powerful binary classifier.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="67">
                <Utterance genid="68" ref="-1" time="09:32:01" date="08/12/2007" oldid="43" color="" topic="">did you find any comparison with other classifiers?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="69">
                <Utterance genid="70" ref="-1" time="09:32:45" date="08/12/2007" oldid="44" color="" topic="">The SVM approach is computationally efficient in training and classification, and it comes with a learning theory that can guide real-world applications.</Utterance>
                <Utterance genid="71" ref="-1" time="09:36:23" date="08/12/2007" oldid="45" color="" topic="">In particular, it can generalize well when there are many fewer training examples in one class than the other.</Utterance>
                <Utterance genid="72" ref="-1" time="09:37:06" date="08/12/2007" oldid="46" color="" topic="">SVM has low binary error and an ability to generalize well when the distribution of training examples is uneven.</Utterance>
                <Utterance genid="73" ref="-1" time="09:37:38" date="08/12/2007" oldid="47" color="" topic="">When a linear separation surface does not exist, for example in presence of noisy data, SVMs algorithms with a slack variable are appropriate.</Utterance>
                <Utterance genid="74" ref="-1" time="09:38:23" date="08/12/2007" oldid="48" color="" topic="">An advantage of this supervised learning is that it can be easily adapted to new operating conditions by providing representative training examples for the new condition.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="75">
                <Utterance genid="76" ref="73" time="09:38:33" date="08/12/2007" oldid="49" color="" topic="">this is an interesting method</Utterance>
                <Utterance genid="77" ref="-1" time="09:39:18" date="08/12/2007" oldid="50" color="" topic="">my question: can you give some use examples?</Utterance>
                <Utterance genid="78" ref="-1" time="09:39:45" date="08/12/2007" oldid="51" color="" topic="">what about the implementation?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="79">
                <Utterance genid="80" ref="-1" time="09:40:18" date="08/12/2007" oldid="52" color="" topic="">This method has been applied with success in a wide area of applications</Utterance>
                <Utterance genid="81" ref="-1" time="09:40:50" date="08/12/2007" oldid="53" color="" topic="">Speech recognition, Computational lexical semantics (word meanings)</Utterance>
                <Utterance genid="82" ref="-1" time="09:42:29" date="08/12/2007" oldid="54" color="" topic="">Semantic role labeling (word meanings and sentence meanings)</Utterance>
                <Utterance genid="83" ref="-1" time="09:43:17" date="08/12/2007" oldid="55" color="" topic="">Information extraction and text classification</Utterance>
                <Utterance genid="84" ref="-1" time="09:43:22" date="08/12/2007" oldid="56" color="" topic="">to name only a few</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="85">
                <Utterance genid="86" ref="-1" time="09:43:41" date="08/12/2007" oldid="57" color="" topic="">that seems very useful, but do you know anything about the performance, the time it takes for the algorithm to reach a conclusion?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="87">
                <Utterance genid="88" ref="-1" time="09:45:54" date="08/12/2007" oldid="58" color="" topic="">The time is most of the time as well as or better than other classification methods</Utterance>
                <Utterance genid="89" ref="-1" time="09:46:14" date="08/12/2007" oldid="59" color="" topic="">It depends a lot on the specific problem it's applied to</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="90">
                <Utterance genid="91" ref="-1" time="09:46:56" date="08/12/2007" oldid="60" color="" topic="">that's very good,</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="92">
                <Utterance genid="93" ref="89" time="09:47:11" date="08/12/2007" oldid="61" color="" topic="">can you give an example so that we can understand better</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="94">
                <Utterance genid="95" ref="93" time="09:48:56" date="08/12/2007" oldid="62" color="" topic="">Presenting the solution by means of small subset of training points gives enormous computational advantages.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="96">
                <Utterance genid="97" ref="-1" time="09:51:02" date="08/12/2007" oldid="63" color="" topic="">performance is the main focus for a method I had in mind</Utterance>
                <Utterance genid="98" ref="-1" time="09:52:02" date="08/12/2007" oldid="64" color="" topic="">I'd like to talk about Maximum Entropy, which relies on statistical methods for classification</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="99">
                <Utterance genid="100" ref="97" time="09:52:46" date="08/12/2007" oldid="65" color="" topic="">I agree it's all about performance and we should talk more about performance comparison</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="101">
                <Utterance genid="102" ref="-1" time="09:53:51" date="08/12/2007" oldid="66" color="" topic="">i have seen a few performance tests but unfortunately I haven't found too much about SVM from this perspective</Utterance>
                <Utterance genid="103" ref="-1" time="09:56:18" date="08/12/2007" oldid="67" color="" topic="">however, maximum entropy is a very simple and a very fast method</Utterance>
                <Utterance genid="104" ref="-1" time="09:57:15" date="08/12/2007" oldid="68" color="" topic="">it relies on the principle that when nothing is known, the probability distribution should be uniform, so we have maximum entropy</Utterance>
                <Utterance genid="105" ref="-1" time="09:57:59" date="08/12/2007" oldid="69" color="" topic="">it relies on a set of labeled training data which is analyzed</Utterance>
                <Utterance genid="106" ref="-1" time="09:58:25" date="08/12/2007" oldid="70" color="" topic="">and then any input data is classified according to this training data</Utterance>
                <Utterance genid="107" ref="-1" time="10:01:08" date="08/12/2007" oldid="71" color="" topic="">from what I could find this method is faster but not as accurate as others</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="108">
                <Utterance genid="109" ref="106" time="10:01:19" date="08/12/2007" oldid="72" color="" topic="">One disadvantage of the maximum-entropy model is exactly the learning cost of its parameters.</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="111">
                <Utterance genid="112" ref="107" time="10:03:26" date="08/12/2007" oldid="74" color="" topic="">For SVM an advantage is the possibility of trading off the required accuracy with the number of Support Vectors.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="115">
                <Utterance genid="116" ref="-1" time="10:09:05" date="08/12/2007" oldid="77" color="" topic="">this is an accuracy test between Maximum Entropy and Naive Bayes for text classification</Utterance>
                <Utterance genid="117" ref="-1" time="10:09:51" date="08/12/2007" oldid="78" color="" topic="">maximum entropy is definitely less accurate but we can see that it gets closer for large vocabulary sizes</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="119">
                <Utterance genid="120" ref="-1" time="10:10:21" date="08/12/2007" oldid="80" color="" topic="">I was disconnected</Utterance>
                <Utterance genid="121" ref="-1" time="10:13:48" date="08/12/2007" oldid="81" color="" topic="">is there anybody here?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="122">
                <Utterance genid="123" ref="-1" time="10:13:58" date="08/12/2007" oldid="82" color="" topic="">we're here</Utterance>
                <Utterance genid="124" ref="-1" time="10:14:10" date="08/12/2007" oldid="83" color="" topic="">are you still having connection problems?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="125">
                <Utterance genid="126" ref="-1" time="10:16:12" date="08/12/2007" oldid="84" color="" topic="">unfortunately yes</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="127">
                <Utterance genid="128" ref="-1" time="10:16:53" date="08/12/2007" oldid="85" color="" topic="">should we continue or try again tomorrow?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="129">
                <Utterance genid="130" ref="-1" time="10:18:09" date="08/12/2007" oldid="86" color="" topic="">please continue</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="131">
                <Utterance genid="132" ref="-1" time="10:18:14" date="08/12/2007" oldid="87" color="" topic="">ok</Utterance>
                <Utterance genid="133" ref="-1" time="10:19:39" date="08/12/2007" oldid="88" color="" topic="">the maximum entropy method relies on the training data to estimate a number of constraints and then the input data is classified based on them</Utterance>
                <Utterance genid="134" ref="-1" time="10:20:28" date="08/12/2007" oldid="89" color="" topic="">for example in the case of book classification during the first phase the algorithm can generate statistical classification of types of books based on word count</Utterance>
                <Utterance genid="135" ref="-1" time="10:21:17" date="08/12/2007" oldid="90" color="" topic="">the obvious advantage here is that it's not limited to binary classification</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="136">
                <Utterance genid="137" ref="135" time="10:23:47" date="08/12/2007" oldid="91" color="" topic="">I would point out as a disadvantage the complexity of theory (maximum entropy) and practice</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="138">
                <Utterance genid="139" ref="137" time="10:25:04" date="08/12/2007" oldid="92" color="" topic="">that's true, it seems more complex than SVM</Utterance>
                <Utterance genid="140" ref="-1" time="10:25:33" date="08/12/2007" oldid="93" color="" topic="">but for a general problem which cannot be easily reduced to a binary one, this is a pretty good solution</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="141">
                <Utterance genid="142" ref="-1" time="10:27:34" date="08/12/2007" oldid="94" color="" topic="">so both of you have solutions to classification problems using advanced mathematics</Utterance>
                <Utterance genid="143" ref="-1" time="10:29:28" date="08/12/2007" oldid="95" color="" topic="">one of the solution is easier to understand and possibly to implement, but requires a problem transformation because it does binary classification - svm</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="144">
                <Utterance genid="145" ref="140" time="10:29:40" date="08/12/2007" oldid="96" color="" topic="">Maximum entropy models are designed to handle feature overlap but a very high degree of overlap requires more iterations of the maximum entropy estimation routine and can lead to numerical difficulties.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="146">
                <Utterance genid="147" ref="143" time="10:30:21" date="08/12/2007" oldid="97" color="" topic="">that is correct, it's clear that there is no perfect method for any kind of problem</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="148">
                <Utterance genid="149" ref="-1" time="10:30:43" date="08/12/2007" oldid="98" color="" topic="">and the other one, maximum entropy - easier to implement and doesn't require special transformation of the problem</Utterance>
                <Utterance genid="150" ref="-1" time="10:31:10" date="08/12/2007" oldid="99" color="" topic="">my method is based on probabilities, statistical information</Utterance>
                <Utterance genid="151" ref="-1" time="10:31:59" date="08/12/2007" oldid="100" color="" topic="">and I believe it will be easier for you to follow if I refer to text tagging problems</Utterance>
                <Utterance genid="152" ref="-1" time="10:32:16" date="08/12/2007" oldid="101" color="" topic="">this is a kind of classification</Utterance>
                <Utterance genid="153" ref="-1" time="10:32:55" date="08/12/2007" oldid="102" color="" topic="">but instead of using complicated algebra, it uses probabilities</Utterance>
                <Utterance genid="154" ref="-1" time="10:33:44" date="08/12/2007" oldid="103" color="" topic="">as an example, we can consider: text tagging - associating words with pos (part of speech)</Utterance>
                <Utterance genid="155" ref="-1" time="10:34:19" date="08/12/2007" oldid="104" color="" topic="">other applications: machine translation, speech recognizing</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="156">
                <Utterance genid="157" ref="153" time="10:35:34" date="08/12/2007" oldid="105" color="" topic="">could you tell us a bit about how it works/</Utterance>
                <Utterance genid="158" ref="-1" time="10:35:35" date="08/12/2007" oldid="106" color="" topic="">?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="159">
                <Utterance genid="160" ref="-1" time="10:35:40" date="08/12/2007" oldid="107" color="" topic="">ok</Utterance>
                <Utterance genid="161" ref="-1" time="10:35:45" date="08/12/2007" oldid="108" color="" topic="">of course</Utterance>
                <Utterance genid="162" ref="-1" time="10:35:58" date="08/12/2007" oldid="109" color="" topic="">if you are patient</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="163">
                <Utterance genid="164" ref="-1" time="10:36:06" date="08/12/2007" oldid="110" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="165">
                <Utterance genid="166" ref="-1" time="10:36:07" date="08/12/2007" oldid="111" color="" topic="">I will give you all the information</Utterance>
                <Utterance genid="167" ref="-1" time="10:36:23" date="08/12/2007" oldid="112" color="" topic="">feel free to interrupt me and ask me any question</Utterance>
                <Utterance genid="168" ref="-1" time="10:36:28" date="08/12/2007" oldid="113" color="" topic="">so</Utterance>
                <Utterance genid="169" ref="-1" time="10:37:01" date="08/12/2007" oldid="114" color="" topic="">you all know what the part of speech means right?</Utterance>
                <Utterance genid="170" ref="-1" time="10:37:09" date="08/12/2007" oldid="115" color="" topic="">these are verbs, nouns etc.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="171">
                <Utterance genid="172" ref="-1" time="10:37:21" date="08/12/2007" oldid="116" color="" topic="">of course</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="173">
                <Utterance genid="174" ref="-1" time="10:37:36" date="08/12/2007" oldid="117" color="" topic="">I just wanted to test if you are still here</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="175">
                <Utterance genid="176" ref="-1" time="10:38:08" date="08/12/2007" oldid="118" color="" topic="">here and paying attention</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="177">
                <Utterance genid="178" ref="153" time="10:38:09" date="08/12/2007" oldid="119" color="" topic="">Your method makes very large assumptions about the data.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="179">
                <Utterance genid="180" ref="-1" time="10:38:38" date="08/12/2007" oldid="120" color="" topic="">I'm talking about text classification</Utterance>
                <Utterance genid="181" ref="-1" time="10:38:45" date="08/12/2007" oldid="121" color="" topic="">at this point</Utterance>
                <Utterance genid="182" ref="-1" time="10:39:11" date="08/12/2007" oldid="122" color="" topic="">but the method can also apply to speech recognition for example</Utterance>
                <Utterance genid="183" ref="-1" time="10:39:39" date="08/12/2007" oldid="123" color="" topic="">in that case sounds are used for classification</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="184">
                <Utterance genid="185" ref="183" time="10:40:44" date="08/12/2007" oldid="124" color="" topic="">interesting, so you're saying that it can be used for any type of input data</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="186">
                <Utterance genid="187" ref="-1" time="10:41:14" date="08/12/2007" oldid="125" color="" topic="">it is based on "experience"</Utterance>
                <Utterance genid="188" ref="-1" time="10:41:32" date="08/12/2007" oldid="126" color="" topic="">you have a corpus</Utterance>
                <Utterance genid="189" ref="-1" time="10:42:16" date="08/12/2007" oldid="127" color="" topic="">and given this corpus you can determine 2 probabilities: likelihood and prior</Utterance>
                <Utterance genid="190" ref="-1" time="10:42:29" date="08/12/2007" oldid="128" color="" topic="">the corpus is the experience</Utterance>
                <Utterance genid="191" ref="-1" time="10:42:49" date="08/12/2007" oldid="129" color="" topic="">a huge text with all the necessary pos marking</Utterance>
                <Utterance genid="192" ref="-1" time="10:43:30" date="08/12/2007" oldid="130" color="" topic="">the likelihood means the probability that a verb is "is" for instance</Utterance>
                <Utterance genid="193" ref="-1" time="10:44:02" date="08/12/2007" oldid="131" color="" topic="">the prior is the probability that a verb is followed by an adverb for example</Utterance>
                <Utterance genid="194" ref="-1" time="10:44:16" date="08/12/2007" oldid="132" color="" topic="">as input you have a phrase</Utterance>
                <Utterance genid="195" ref="-1" time="10:44:35" date="08/12/2007" oldid="133" color="" topic="">as output you get a pos sequence</Utterance>
                <Utterance genid="196" ref="-1" time="10:45:20" date="08/12/2007" oldid="134" color="" topic="">this is just an example of how hidden markov model can be used</Utterance>
                <Utterance genid="197" ref="-1" time="10:46:47" date="08/12/2007" oldid="135" color="" topic="">to generalize, the hidden markov model can be described as a set of "hidden" state, transition between them and observations</Utterance>
                <Utterance genid="198" ref="-1" time="10:47:07" date="08/12/2007" oldid="136" color="" topic="">I will show you a picture in order to better understand the general concept</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="199">
                <Utterance genid="200" ref="193" time="10:48:46" date="08/12/2007" oldid="137" color="" topic="">you talked about the prior, does this mean that the method ignores the sequences that are after the word it's tagging and only takes into account the ones before it?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="201">
                <Utterance genid="202" ref="-1" time="10:49:03" date="08/12/2007" oldid="138" color="" topic="">yes</Utterance>
                <Utterance genid="203" ref="-1" time="10:49:14" date="08/12/2007" oldid="139" color="" topic="">this is a simplification</Utterance>
                <Utterance genid="204" ref="203" time="10:52:10" date="08/12/2007" oldid="140" color="" topic="">in fact this is not the only one</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="205">
                <Utterance genid="206" ref="203" time="10:52:27" date="08/12/2007" oldid="141" color="" topic="">The number of parameters that need to be set in an HMM is huge. For a simple four-state HMM, with five continuous channels, there would be a total of 50 parameters that would need to be evaluated.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="207">
                <Utterance genid="208" ref="-1" time="10:53:01" date="08/12/2007" oldid="142" color="" topic="">sorry, what king of channel we are talking about?</Utterance>
                <Utterance genid="209" ref="208" time="10:53:43" date="08/12/2007" oldid="143" color="" topic="">kind :)</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="210">
                <Utterance genid="211" ref="206" time="10:53:47" date="08/12/2007" oldid="144" color="" topic="">As a result of the above, the amount of data that is required to train an HMM is very large.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="212">
                <Utterance genid="213" ref="206" time="10:55:12" date="08/12/2007" oldid="145" color="" topic="">I didn't understand this, what are these channels about?</Utterance>
                <Utterance genid="214" ref="-1" time="10:55:25" date="08/12/2007" oldid="146" color="" topic="">speech recognizing maybe?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="215">
                <Utterance genid="216" ref="208" time="10:55:34" date="08/12/2007" oldid="147" color="" topic="">For example the symbol emission matrix of an HMM can be split into two channels: a static channel to maintain the validity of the HMM and a dynamic channel that is modified to maximize the separable distance.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="217">
                <Utterance genid="218" ref="216" time="10:58:57" date="08/12/2007" oldid="148" color="" topic="">I don't exactly know about the problem you are talking about, if you can elaborate I would appreciate that</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="219">
                <Utterance genid="220" ref="216" time="11:01:06" date="08/12/2007" oldid="149" color="" topic="">could the channels be separate input streams or an internal mechanism of the algorithm?</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="221">
                <Utterance genid="222" ref="-1" time="11:02:15" date="08/12/2007" oldid="150" color="" topic="">It's related to the internal mechanism</Utterance>
                <Utterance genid="223" ref="-1" time="11:02:36" date="08/12/2007" oldid="151" color="" topic="">it was just an example I found</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="224">
                <Utterance genid="225" ref="222" time="11:02:47" date="08/12/2007" oldid="152" color="" topic="">so you're talking about an implementation</Utterance>
                <Utterance genid="226" ref="-1" time="11:02:55" date="08/12/2007" oldid="153" color="" topic="">not about the general idea</Utterance>
                <Utterance genid="227" ref="-1" time="11:02:58" date="08/12/2007" oldid="154" color="" topic="">:)</Utterance>
                <Utterance genid="228" ref="-1" time="11:03:44" date="08/12/2007" oldid="155" color="" topic="">ok</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="229">
                <Utterance genid="230" ref="-1" time="11:03:50" date="08/12/2007" oldid="156" color="" topic="">the only important fact was that for a HMM with 4 states you have 50 parameters</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="231">
                <Utterance genid="232" ref="-1" time="11:04:40" date="08/12/2007" oldid="157" color="" topic="">not necessarly, it depends on the transitions between states and those depend on the problem you want to solve</Utterance>
                <Utterance genid="233" ref="-1" time="11:05:14" date="08/12/2007" oldid="158" color="" topic="">you are right to say that the implementations of this method have a lot of info behind</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="234">
                <Utterance genid="235" ref="-1" time="11:05:47" date="08/12/2007" oldid="159" color="" topic="">but i think we can reach the conclusion that it's more complicated to solve a given problem using HMM than with other methods. I think that was the point</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="236">
                <Utterance genid="237" ref="-1" time="11:05:51" date="08/12/2007" oldid="160" color="" topic="">from that huge amount of information, statitistics are gathered and a lot of parameters could appear</Utterance>
                <Utterance genid="238" ref="-1" time="11:06:09" date="08/12/2007" oldid="161" color="" topic="">I don't agree</Utterance>
                <Utterance genid="239" ref="-1" time="11:06:34" date="08/12/2007" oldid="162" color="" topic="">you have a O(n^3) complexity to do the text tagging I talked before</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="240">
                <Utterance genid="241" ref="238" time="11:07:00" date="08/12/2007" oldid="163" color="" topic="">While the basic theory is elegant, by the time you get to an implementation, several additions have to be made to the simple algorithm.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="242">
                <Utterance genid="243" ref="-1" time="11:07:10" date="08/12/2007" oldid="164" color="" topic="">check the viterbi algorithm</Utterance>
                <Utterance genid="244" ref="241" time="11:07:39" date="08/12/2007" oldid="165" color="" topic="">not really</Utterance>
                <Utterance genid="245" ref="-1" time="11:08:26" date="08/12/2007" oldid="166" color="" topic="">the data behind the solution is huge but the solution is simple</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="246">
                <Utterance genid="247" ref="-1" time="11:08:26" date="08/12/2007" oldid="167" color="" topic="">For example parameter-tying, handling of continuous values, adjustments to the state duration model, adding null emissions.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="248">
                <Utterance genid="249" ref="241" time="11:09:07" date="08/12/2007" oldid="168" color="" topic="">I wan't to clarify the problem</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="250">
                <Utterance genid="251" ref="247" time="11:09:10" date="08/12/2007" oldid="169" color="" topic="">from what I can se the difficulty of this method can vary a lot depending on the specific classification problem that we need to solve</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="252">
                <Utterance genid="253" ref="-1" time="11:09:31" date="08/12/2007" oldid="170" color="" topic="">want</Utterance>
                <Utterance genid="254" ref="-1" time="11:10:18" date="08/12/2007" oldid="171" color="" topic="">yes it's true, it's more difficult to do speech recognizing than text tagging</Utterance>
                <Utterance genid="255" ref="-1" time="11:10:46" date="08/12/2007" oldid="172" color="" topic="">but at the top application level, algorithm, things may look the same</Utterance>
                <Utterance genid="256" ref="-1" time="11:10:53" date="08/12/2007" oldid="173" color="" topic="">take a look at the picture</Utterance>
                <Utterance genid="257" ref="-1" time="11:11:13" date="08/12/2007" oldid="174" color="" topic="">that is the basic concept behind the Markov method</Utterance>
                <Utterance genid="258" ref="-1" time="11:11:25" date="08/12/2007" oldid="175" color="" topic="">x1 x2 x3 are states</Utterance>
                <Utterance genid="259" ref="-1" time="11:11:45" date="08/12/2007" oldid="176" color="" topic="">a.. are the probabilities between states - prior</Utterance>
                <Utterance genid="260" ref="-1" time="11:12:12" date="08/12/2007" oldid="177" color="" topic="">b11 is the probability of x1 being y1</Utterance>
                <Utterance genid="261" ref="-1" time="11:13:11" date="08/12/2007" oldid="178" color="" topic="">one of the problems: given the observations Y determine the most likely associated X</Utterance>
                <Utterance genid="262" ref="-1" time="11:13:18" date="08/12/2007" oldid="179" color="" topic="">states</Utterance>
                <Utterance genid="263" ref="-1" time="11:13:33" date="08/12/2007" oldid="180" color="" topic="">I told</Utterance>
                <Utterance genid="264" ref="-1" time="11:14:08" date="08/12/2007" oldid="181" color="" topic="">I told you - behind this there is a lot of data which is analyzed and some parameters are extracted</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="265">
                <Utterance genid="266" ref="264" time="11:15:01" date="08/12/2007" oldid="182" color="" topic="">The concept learnt by a hidden Markov model is the emission and transition probabilities. If one is trying to understand the concept learnt by the hidden Markov model, then this concept representation is difficult to understand. In speech recognition, this issue is of little significance, but in other domains, it may be even more important than accuracy.</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="267">
                <Utterance genid="268" ref="259" time="11:15:17" date="08/12/2007" oldid="183" color="" topic="">and the number of states and the relationships between them are determined for a specific problem?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="269">
                <Utterance genid="270" ref="-1" time="11:15:40" date="08/12/2007" oldid="184" color="" topic="">yes</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="271">
                <Utterance genid="272" ref="-1" time="11:16:12" date="08/12/2007" oldid="185" color="" topic="">i see</Utterance>
                <Utterance genid="273" ref="-1" time="11:16:30" date="08/12/2007" oldid="186" color="" topic="">do you have any performance data?</Utterance>
                <Utterance genid="274" ref="-1" time="11:16:45" date="08/12/2007" oldid="187" color="" topic="">or a comparison with other methods in terms of accuracy?</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="275">
                <Utterance genid="276" ref="266" time="11:17:35" date="08/12/2007" oldid="188" color="" topic="">the method parameters depend on the type of problem which is to be solved</Utterance>
                <Utterance genid="277" ref="274" time="11:18:24" date="08/12/2007" oldid="189" color="" topic="">because it's based on statistics, accuracy could be an issue</Utterance>
                <Utterance genid="278" ref="274" time="11:20:11" date="08/12/2007" oldid="190" color="" topic="">in what performance is concerned, there are some simplifications in the model that provide the basis for a O(n^3) complexity implementation</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="279">
                <Utterance genid="280" ref="278" time="11:20:28" date="08/12/2007" oldid="191" color="" topic="">Over-fitting is another problem of HMM. The algorithm is only as good as your training set but more training is not always good.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="281">
                <Utterance genid="282" ref="-1" time="11:21:05" date="08/12/2007" oldid="192" color="" topic="">I agree, is like I said before - accuracy could be an issue of this method</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="283">
                <Utterance genid="284" ref="280" time="11:21:39" date="08/12/2007" oldid="193" color="" topic="">so you're saying that the first part of setting the parameters and the training determines the general accuracy</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="285">
                <Utterance genid="286" ref="-1" time="11:22:12" date="08/12/2007" oldid="194" color="" topic="">yes, the parameter configuration is very important in this method</Utterance>
                <Utterance genid="287" ref="-1" time="11:23:10" date="08/12/2007" oldid="195" color="" topic="">if you don't have good reliable background data which can be used for statistics then things won't work fine</Utterance>
            </Turn>
            <Turn nickname="(#user2#)" genid="288">
                <Utterance genid="289" ref="287" time="11:23:45" date="08/12/2007" oldid="196" color="" topic="">Also the algorithm finds local maximums, so the model may not converge to a truly optimal parameter set for a given training set.</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="290">
                <Utterance genid="291" ref="-1" time="11:23:48" date="08/12/2007" oldid="197" color="" topic="">to conclude, there are two parts to any implementation of hidden markov model:</Utterance>
                <Utterance genid="292" ref="-1" time="11:25:02" date="08/12/2007" oldid="198" color="" topic="">1st the parameter determination and setting based on special background data</Utterance>
                <Utterance genid="293" ref="-1" time="11:26:05" date="08/12/2007" oldid="199" color="" topic="">2nd the solutin to the actual problem</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="294">
                <Utterance genid="295" ref="-1" time="11:26:37" date="08/12/2007" oldid="200" color="" topic="">i see that one of the possible applications for this is cryptanalysis, but a statistical approach doesn't seem too reliable for this</Utterance>
                <Utterance genid="296" ref="-1" time="11:27:12" date="08/12/2007" oldid="201" color="" topic="">though I haven't found any results for real applications</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="297">
                <Utterance genid="298" ref="295" time="11:28:19" date="08/12/2007" oldid="202" color="" topic="">well, I don't know exactly how it can be used for decoding encrypted information</Utterance>
                <Utterance genid="299" ref="-1" time="11:29:06" date="08/12/2007" oldid="203" color="" topic="">this is a very difficult problem</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="300">
                <Utterance genid="301" ref="-1" time="11:29:48" date="08/12/2007" oldid="204" color="" topic="">maybe it's more related to the tagging problem, but instead of tagging parts of speech, it tries to assign a clear text word for each crypted sequence</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="302">
                <Utterance genid="303" ref="299" time="11:30:07" date="08/12/2007" oldid="205" color="" topic="">yes</Utterance>
                <Utterance genid="304" ref="301" time="11:31:46" date="08/12/2007" oldid="206" color="" topic="">but the crypted sequence is the result of a function which has as parameters the text and other like passwords or symetrical keys or asymetrical keys</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="305">
                <Utterance genid="306" ref="304" time="11:32:20" date="08/12/2007" oldid="207" color="" topic="">yes, this seems to be the logical approach</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="307">
                <Utterance genid="308" ref="304" time="11:32:21" date="08/12/2007" oldid="208" color="" topic="">so the encrypted text can not be linked so easily to a clear text</Utterance>
                <Utterance genid="309" ref="-1" time="11:32:35" date="08/12/2007" oldid="209" color="" topic="">do you agree?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="310">
                <Utterance genid="311" ref="-1" time="11:33:02" date="08/12/2007" oldid="210" color="" topic="">yes</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="312">
                <Utterance genid="313" ref="311" time="11:33:51" date="08/12/2007" oldid="211" color="" topic="">anyway I will search infos about the use of HMM in cryptanalisys</Utterance>
                <Utterance genid="314" ref="-1" time="11:34:24" date="08/12/2007" oldid="212" color="" topic="">at this point, I don't see the method as a good solution for cryptanalisys</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="315">
                <Utterance genid="316" ref="-1" time="11:35:18" date="08/12/2007" oldid="213" color="" topic="">from what I found statistical methods can only be used when you know that the clear text is a complete sentence, or a real text</Utterance>
                <Utterance genid="317" ref="-1" time="11:36:17" date="08/12/2007" oldid="214" color="" topic="">for example, you know what language the text is in, you know what's the most common word in that language and you assume that the most common sequence of characters in your encrypted text is that word</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="318">
                <Utterance genid="319" ref="316" time="11:36:35" date="08/12/2007" oldid="215" color="" topic="">I wonder about the used metric</Utterance>
                <Utterance genid="320" ref="317" time="11:37:41" date="08/12/2007" oldid="216" color="" topic="">you're right and you know that the currently used encryption methods are not so simple</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="321">
                <Utterance genid="322" ref="-1" time="11:38:49" date="08/12/2007" oldid="217" color="" topic="">of course they're not and I still don't see how statistical methods could be effective in this case, but it's interesting that they can be used like this</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="323">
                <Utterance genid="324" ref="320" time="11:39:00" date="08/12/2007" oldid="218" color="" topic="">this should have been a "but"</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="325">
                <Utterance genid="326" ref="-1" time="11:51:45" date="08/12/2007" oldid="219" color="" topic="">in Maximum Entropy the steps are a little different from HMM</Utterance>
                <Utterance genid="327" ref="-1" time="11:52:36" date="08/12/2007" oldid="220" color="" topic="">ME uses the training data provided to generate a set of contraints based on the labels assigned to it</Utterance>
                <Utterance genid="328" ref="-1" time="11:53:32" date="08/12/2007" oldid="221" color="" topic="">and based on these constraints the method determines that the input sets belong to a class with a certain probability</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="329">
                <Utterance genid="330" ref="328" time="11:55:06" date="08/12/2007" oldid="222" color="" topic="">so your method is also a statistical approach</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="331">
                <Utterance genid="332" ref="-1" time="11:55:14" date="08/12/2007" oldid="223" color="" topic="">yes</Utterance>
                <Utterance genid="333" ref="330" time="11:55:40" date="08/12/2007" oldid="224" color="" topic="">it is, but it doesn't use a state automata to determine the rules for classification</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="334">
                <Utterance genid="335" ref="-1" time="11:55:42" date="08/12/2007" oldid="225" color="" topic="">what about the number of constraints to be configured?</Utterance>
                <Utterance genid="336" ref="-1" time="11:56:03" date="08/12/2007" oldid="226" color="" topic="">because in HMM there are a lot of parameters to be configured</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="337">
                <Utterance genid="338" ref="-1" time="11:56:03" date="08/12/2007" oldid="227" color="" topic="">the constraints depend on the specific problem</Utterance>
                <Utterance genid="339" ref="-1" time="11:56:23" date="08/12/2007" oldid="228" color="" topic="">they seem to be more like conclusions from the training set</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="340">
                <Utterance genid="341" ref="338" time="11:56:54" date="08/12/2007" oldid="229" color="" topic="">I agree but would you say there are many or less than in HMM for the same problem?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="342">
                <Utterance genid="343" ref="-1" time="11:59:11" date="08/12/2007" oldid="230" color="" topic="">i think that they are less</Utterance>
                <Utterance genid="344" ref="-1" time="11:59:50" date="08/12/2007" oldid="231" color="" topic="">and also they seem to be less powerful since they depend on the size and distribution of the training sets</Utterance>
            </Turn>
            <Turn nickname="(#user0#)" genid="345">
                <Utterance genid="346" ref="343" time="11:59:56" date="08/12/2007" oldid="232" color="" topic="">this could mean you have less parameters but more operations to be executed, right?</Utterance>
            </Turn>
            <Turn nickname="(#user1#)" genid="347">
                <Utterance genid="348" ref="346" time="12:00:48" date="09/12/2007" oldid="233" color="" topic="">yes, I believe so</Utterance>
            </Turn>
        </Body>
    </Dialog>