/// <summary>
/// Parses a Pamela (.PAM) lip-sync file into a <see cref="SpeechLipSyncLine"/>.
/// Reads the [Speech] section, converting each "XPOS:PHONEME" entry into a
/// timed phoneme frame; unknown phoneme codes are reported via
/// <paramref name="errors"/> and skipped.
/// </summary>
/// <param name="fileName">Path to the PAM file to read.</param>
/// <param name="errors">Collector that receives a CompileError for each phoneme code with no matching frame.</param>
/// <returns>The parsed lip-sync line (never null; may contain zero phonemes).</returns>
private SpeechLipSyncLine CompilePAMFile(string fileName, CompileMessages errors)
{
    SpeechLipSyncLine syncDataForThisFile = new SpeechLipSyncLine();
    syncDataForThisFile.FileName = Path.GetFileNameWithoutExtension(fileName);
    string thisLine;
    bool inMainSection = false;
    int lineNumber = 0;

    // using-statement ensures the file handle is released even if a malformed
    // line causes int.Parse to throw (the original leaked the reader on error).
    using (StreamReader sr = new StreamReader(fileName))
    {
        while ((thisLine = sr.ReadLine()) != null)
        {
            lineNumber++;
            // Section headers are matched case-insensitively; ordinal comparison
            // avoids culture-dependent casing surprises (e.g. Turkish 'I').
            if (thisLine.StartsWith("[speech]", StringComparison.OrdinalIgnoreCase))
            {
                inMainSection = true;
                continue;
            }
            if (inMainSection)
            {
                if (thisLine.TrimStart().StartsWith("["))
                {
                    // moved onto another section
                    break;
                }
                if (thisLine.IndexOf(':') > 0)
                {
                    string[] parts = thisLine.Split(':');
                    // Convert from Pamela XPOS into milliseconds.
                    // Integer division by 15 first is intentional: XPOS ticks are
                    // grouped 15-per-frame, then frames mapped to ms at 24 fps.
                    int milliSeconds = ((int.Parse(parts[0], System.Globalization.CultureInfo.InvariantCulture) / 15) * 1000) / 24;
                    string phenomeCode = parts[1].Trim().ToUpperInvariant();
                    int frameID = FindFrameNumberForPhenome(phenomeCode);
                    if (frameID < 0)
                    {
                        string friendlyFileName = Path.GetFileName(fileName);
                        errors.Add(new CompileError("No frame found to match phenome code '" + phenomeCode + "'",
                            friendlyFileName, lineNumber));
                    }
                    else
                    {
                        syncDataForThisFile.Phenomes.Add(new SpeechLipSyncPhenome(milliSeconds, (short)frameID));
                    }
                }
            }
        }
    }

    syncDataForThisFile.Phenomes.Sort();

    // The PAM file contains start times; convert to end times by shifting each
    // entry's offset to the next entry's start. The last phoneme is then padded
    // out 1000 ms past its own start time.
    // NOTE(review): with exactly one phoneme, its end stays equal to its start
    // (zero duration) — preserved from the original; confirm this is intended.
    for (int i = 0; i < syncDataForThisFile.Phenomes.Count - 1; i++)
    {
        syncDataForThisFile.Phenomes[i].EndTimeOffset = syncDataForThisFile.Phenomes[i + 1].EndTimeOffset;
    }
    if (syncDataForThisFile.Phenomes.Count > 1)
    {
        syncDataForThisFile.Phenomes[syncDataForThisFile.Phenomes.Count - 1].EndTimeOffset =
            syncDataForThisFile.Phenomes[syncDataForThisFile.Phenomes.Count - 2].EndTimeOffset + 1000;
    }

    return syncDataForThisFile;
}