@inproceedings{d9c1d4ce1ce740a188d7e3c07b3c1ab9,
  title     = {{Double Digest} Revisited: Complexity and Approximability in the Presence of Noisy Data},
  abstract  = {We revisit the Double Digest problem, which occurs in sequencing of large DNA strings and consists of reconstructing the relative positions of cut sites from two different enzymes: we first show that Double Digest is strongly NP-complete, improving upon previous results that only showed weak NP-completeness. Even the (experimentally more meaningful) variation in which we disallow coincident cut sites turns out to be strongly NP-complete. In a second part, we model errors in data as they occur in real-life experiments: we propose several optimization variations of Double Digest that model partial cleavage errors. We then show APX-completeness for most of these variations. In a third part, we investigate these variations with the additional restriction that coincident cut sites are disallowed, and we show that it is NP-hard to even find feasible solutions in this case, thus making it impossible to guarantee any approximation ratio at all.},
  author    = {Cieliebak, Mark and Eidenbenz, Stephan and Woeginger, Gerhard J.},
  editor    = {Warnow, Tandy and Zhu, Binhai},
  booktitle = {Proceedings of the 9th International Computing and Combinatorics Conference (COCOON'2003, Big Sky MT, USA, July 25--28, 2003)},
  series    = {Lecture Notes in Computer Science},
  volume    = {2697},
  pages     = {519--527},
  publisher = {Springer},
  address   = {Berlin, Heidelberg},
  year      = {2003},
  doi       = {10.1007/3-540-45071-8_52},
  isbn      = {3-540-40534-8},
  language  = {English},
}