Import haproxy_2.0.13-2.debian.tar.xz
authorVincent Bernat <bernat@debian.org>
Wed, 1 Apr 2020 19:49:32 +0000 (20:49 +0100)
committerVincent Bernat <bernat@debian.org>
Wed, 1 Apr 2020 19:49:32 +0000 (20:49 +0100)
[dgit import tarball haproxy 2.0.13-2 haproxy_2.0.13-2.debian.tar.xz]

72 files changed:
NEWS [new file with mode: 0644]
changelog [new file with mode: 0644]
clean [new file with mode: 0644]
compat [new file with mode: 0644]
control [new file with mode: 0644]
copyright [new file with mode: 0644]
dconv/LICENSE [new file with mode: 0644]
dconv/NOTICE [new file with mode: 0644]
dconv/README.md [new file with mode: 0644]
dconv/css/check.png [new file with mode: 0644]
dconv/css/cross.png [new file with mode: 0644]
dconv/css/page.css [new file with mode: 0644]
dconv/haproxy-dconv.py [new file with mode: 0755]
dconv/img/logo-med.png [new file with mode: 0644]
dconv/js/typeahead.bundle.js [new file with mode: 0644]
dconv/parser/__init__.py [new file with mode: 0644]
dconv/parser/arguments.py [new file with mode: 0644]
dconv/parser/example.py [new file with mode: 0644]
dconv/parser/keyword.py [new file with mode: 0644]
dconv/parser/seealso.py [new file with mode: 0644]
dconv/parser/table.py [new file with mode: 0644]
dconv/parser/underline.py [new file with mode: 0644]
dconv/templates/parser/arguments.tpl [new file with mode: 0644]
dconv/templates/parser/example.tpl [new file with mode: 0644]
dconv/templates/parser/example/comment.tpl [new file with mode: 0644]
dconv/templates/parser/seealso.tpl [new file with mode: 0644]
dconv/templates/parser/table.tpl [new file with mode: 0644]
dconv/templates/parser/table/header.tpl [new file with mode: 0644]
dconv/templates/parser/table/row.tpl [new file with mode: 0644]
dconv/templates/parser/underline.tpl [new file with mode: 0644]
dconv/templates/summary.html [new file with mode: 0644]
dconv/templates/template.html [new file with mode: 0644]
dconv/tools/generate-docs.sh [new file with mode: 0755]
gbp.conf [new file with mode: 0644]
halog.1 [new file with mode: 0644]
haproxy-doc.doc-base.haproxy [new file with mode: 0644]
haproxy-doc.doc-base.haproxy-lua [new file with mode: 0644]
haproxy-doc.docs [new file with mode: 0644]
haproxy-doc.install [new file with mode: 0644]
haproxy-doc.links [new file with mode: 0644]
haproxy-doc.maintscript [new file with mode: 0644]
haproxy.README.Debian [new file with mode: 0644]
haproxy.cfg [new file with mode: 0644]
haproxy.default [new file with mode: 0644]
haproxy.dirs [new file with mode: 0644]
haproxy.docs [new file with mode: 0644]
haproxy.examples [new file with mode: 0644]
haproxy.init [new file with mode: 0644]
haproxy.install [new file with mode: 0644]
haproxy.maintscript [new file with mode: 0644]
haproxy.manpages [new file with mode: 0644]
haproxy.postinst [new file with mode: 0644]
haproxy.postrm [new file with mode: 0644]
haproxy.tmpfile [new file with mode: 0644]
haproxy.vim [new file with mode: 0644]
logrotate.conf [new file with mode: 0644]
patches/0001-BUG-CRITICAL-hpack-never-index-a-header-into-the-hea.patch [new file with mode: 0644]
patches/0002-Use-dpkg-buildflags-to-build-halog.patch [new file with mode: 0644]
patches/debianize-dconv.patch [new file with mode: 0644]
patches/haproxy.service-add-documentation.patch [new file with mode: 0644]
patches/haproxy.service-start-after-syslog.patch [new file with mode: 0644]
patches/series [new file with mode: 0644]
rsyslog.conf [new file with mode: 0644]
rules [new file with mode: 0755]
source/format [new file with mode: 0644]
source/include-binaries [new file with mode: 0644]
tests/cli [new file with mode: 0644]
tests/control [new file with mode: 0644]
tests/proxy-localhost [new file with mode: 0644]
vim-haproxy.install [new file with mode: 0644]
vim-haproxy.yaml [new file with mode: 0644]
watch [new file with mode: 0644]

diff --git a/NEWS b/NEWS
new file mode 100644 (file)
index 0000000..1edb770
--- /dev/null
+++ b/NEWS
@@ -0,0 +1,35 @@
+haproxy (1.8.0-1) experimental; urgency=medium
+
+  In order to upgrade to the HAProxy 1.8 new process model, a full service
+  restart will be performed automatically on upgrade from pre-1.8 versions
+  when running under systemd. This incurs (minimal) service downtime and
+  will only be performed once; future upgrades will reload haproxy seamlessly.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 28 Nov 2017 23:44:01 +0200
+
+haproxy (1.4.23-1) unstable; urgency=low
+
+  As of 1.4.23-1, the Debian package ships an rsyslog snippet to allow logging
+  via /dev/log from chrooted HAProxy processes. If you are using rsyslog, you
+  should restart rsyslog after installing this package to enable HAProxy to log
+  via rsyslog. See /usr/share/doc/haproxy/README.Debian for more details.
+
+  Also note that as of 1.4.23-1, chrooting the HAProxy process is enabled in the
+  default Debian configuration.
+
+ -- Apollon Oikonomopoulos <apoikos@gmail.com>  Thu, 25 Apr 2013 23:26:35 +0300
+
+haproxy (1.4.13-1) unstable; urgency=low
+
+  Maintainer of this package has changed. 
+
+ -- Christo Buschek <crito@30loops.net>  Mon, 10 Mar 2011 22:07:10 +0100
+
+haproxy (1.3.14.2-1) unstable; urgency=low
+
+  Configuration has moved to /etc/haproxy/haproxy.cfg.  This allows to add the
+  configurable /etc/haproxy/errors directory.
+  The haproxy binary was also moved to /usr/sbin rather than /usr/bin, update
+  your init script or reinstall the one provided with the package.
+
+ -- Arnaud Cornet <acornet@debian.org>  Mon, 21 Jan 2008 23:38:15 +0100
diff --git a/changelog b/changelog
new file mode 100644 (file)
index 0000000..5c9fe8b
--- /dev/null
+++ b/changelog
@@ -0,0 +1,1802 @@
+haproxy (2.0.13-2) unstable; urgency=medium
+
+  * d/dconv: replace cgi.escape by html.escape. Closes: #951416.
+  * d/copryight: document OpenSSL exception. Closes: #951782.
+  * d/haproxy.cfg: use "ssl-min-ver" to set minimum version.
+  * Apply one patch to fix an overflow in HTTP/2 header handling.
+    Fix CVE-2020-11100.
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 01 Apr 2020 21:49:32 +0200
+
+haproxy (2.0.13-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: hashes: fix the signedness of the hash inputs
+    - BUG/MAJOR: memory: Don't forget to unlock the rwlock if the pool is
+                 empty.
+  * d/dconv: use Python 3 to build the documentation.
+    Closes: #948296, #950435.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 15 Feb 2020 15:32:32 +0100
+
+haproxy (2.0.12-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: task: add a new TASK_SHARED_WQ flag to fix foreign requeuing
+  * d/logrotate.conf: use rsyslog helper instead of SysV init script.
+    Closes: #946973.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 20 Dec 2019 08:20:33 +0100
+
+haproxy (2.0.11-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: dns: add minimalist error processing on the Rx path
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 13 Dec 2019 19:22:03 +0100
+
+haproxy (2.0.10-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: h2: make header field name filtering stronger
+    - BUG/MAJOR: h2: reject header values containing invalid chars
+    - BUG/MAJOR: mux-h2: don't try to decode a response HEADERS frame in
+                 idle state
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 26 Nov 2019 13:22:17 +0100
+
+haproxy (2.0.9-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: stream-int: Don't receive data from mux until SI_ST_EST
+                 is reached
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 16 Nov 2019 17:38:51 +0100
+
+haproxy (2.0.8-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: idle conns: schedule the cleanup task on the correct
+                 threads
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 23 Oct 2019 08:55:55 +0200
+
+haproxy (2.0.7-1) unstable; urgency=medium
+
+  * New upstream release.  
+    - BUG/MAJOR: mux-h2: Handle HEADERS frames received after a RST_STREAM
+                 frame
+    - BUG/MAJOR: mux_h2: Don't consume more payload than received for
+                 skipped frames
+    - BUG/MEDIUM: checks: make sure the connection is ready before trying
+                  to recv
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 27 Sep 2019 19:14:12 +0200
+
+haproxy (2.0.6-2) unstable; urgency=medium
+
+  * d/patches: fix regression with checks.
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 18 Sep 2019 08:02:53 +0200
+
+haproxy (2.0.6-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: ssl: ssl_sock was not fully initialized.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 13 Sep 2019 21:25:38 +0200
+
+haproxy (2.0.5-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MEDIUM: mux_h1: Don't bother subscribing in recv if we're not
+                  connected.
+    - BUG/MEDIUM: mux_pt: Don't call unsubscribe if we did not subscribe.
+    - BUG/MEDIUM: proxy: Don't forget the SF_HTX flag when upgrading
+                  TCP=>H1+HTX.
+    - BUG/MEDIUM: proxy: Don't use cs_destroy() when freeing the
+                  conn_stream.
+    - BUG/MEDIUM: stick-table: Wrong stick-table backends parsing.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 16 Aug 2019 19:51:24 +0200
+
+haproxy (2.0.4-1) unstable; urgency=medium
+
+  * New upstream release. Upload to unstable.
+    - BUG/MAJOR: http/sample: use a static buffer for raw -> htx
+                 conversion
+    - BUG/MAJOR: queue/threads: avoid an AB/BA locking issue in
+                 process_srv_queue()
+  * d/haproxy.cfg: update default cipher lists to more secure defaults.
+    TLSv1.0 and TLSv1.1 are disabled, as well as TLS tickets (they are
+    breaking forward secrecy unless correctly rotated).
+    Closes: #932763.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 09 Aug 2019 14:22:23 +0200
+
+haproxy (2.0.3-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/CRITICAL: http_ana: Fix parsing of malformed cookies which start by
+                    a delimiter (CVE-2019-14241)
+    - BUG/MEDIUM: checks: Don't attempt to receive data if we already
+                  subscribed.
+    - BUG/MEDIUM: http/htx: unbreak option http_proxy
+    - DOC: htx: Update comments in HTX files
+    - BUG/MEDIUM: mux-h1: Trim excess server data at the end of a transaction
+    - BUG/MEDIUM: tcp-checks: do not dereference inexisting conn_stream
+  * Bump Standards-Version to 4.4.0; no changes needed
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 23 Jul 2019 13:31:31 -0300
+
+haproxy (2.0.2-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: listener: fix thread safety in resume_listener()
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 17 Jul 2019 12:19:54 +0200
+
+haproxy (2.0.1-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: sample: Wrong stick-table name parsing in "if/unless" ACL
+                 condition.
+    - BUG/MAJOR: mux-h1: Don't crush trash chunk area when outgoing
+                 message is formatted
+  * d/rules: fix crash during reload due to libgcc_s.so missing when
+    chrooted.
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 24 Jun 2019 19:28:26 +0200
+
+haproxy (2.0.0-1) experimental; urgency=medium
+
+  * New upstream version.
+  * d/watch: update to follow 2.0.
+  * d/gbp.conf: update for 2.0 and experimental.
+  * d/rules: update to use linux-glibc target.
+  * d/rules: enable prometheus exporter.
+  * d/patches: refresh patches.
+  * d/vim-haproxy.install: update path to vim syntax file.
+  * d/README.Debian: remove outdated information.
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 20 Jun 2019 11:40:19 +0200
+
+haproxy (1.9.8-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: map/acl: real fix segfault during show map/acl on CLI
+    - BUG/MAJOR: mux-h2: do not add a stream twice to the send list
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 16 May 2019 01:50:10 +0200
+
+haproxy (1.9.7-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: http_fetch: Get the channel depending on the keyword used
+    - BUG/MAJOR: lb/threads: fix AB/BA locking issue in round-robin LB
+    - BUG/MAJOR: lb/threads: fix insufficient locking on round-robin LB
+    - BUG/MAJOR: muxes: Use the HTX mode to find the best mux for HTTP
+                 proxies only
+    - BUG/MAJOR: task: make sure never to delete a queued task
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 28 Apr 2019 17:37:04 +0200
+
+haproxy (1.9.6-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: checks: segfault during tcpcheck_main
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 30 Mar 2019 12:43:33 +0100
+
+haproxy (1.9.5-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: cache/htx: Set the start-line offset when a cached object
+                 is served
+    - BUG/MAJOR: fd/threads, task/threads: ensure all spin locks are
+                 unlocked
+    - BUG/MAJOR: listener: Make sure the listener exist before using it.
+    - BUG/MAJOR: mux-h2: fix race condition between close on both ends
+    - BUG/MAJOR: spoe: Don't try to get agent config during SPOP
+                 healthcheck
+    - BUG/MAJOR: spoe: Fix initialization of thread-dependent fields
+    - BUG/MAJOR: stats: Fix how huge POST data are read from the channel
+    - BUG/MAJOR: stream: avoid double free on unique_id
+    - BUG/MAJOR: tasks: Use the TASK_GLOBAL flag to know if we're in the
+                 global rq.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 19 Mar 2019 20:13:48 +0100
+
+haproxy (1.9.4-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: config: verify that targets of track-sc and stick rules
+                 are present
+    - BUG/MAJOR: htx/backend: Make all tests on HTTP messages compatible
+                 with HTX
+    - BUG/MAJOR: spoe: verify that backends used by SPOE cover all their
+                 callers' processes
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 07 Feb 2019 12:48:42 +0100
+
+haproxy (1.9.3-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: mux-h2: don't destroy the stream on failed allocation in
+                 h2_snd_buf()
+    - BUG/MEDIUM: checks: fix recent regression on agent-check making it
+                  crash
+    - BUG/MEDIUM: ssl: Fix handling of TLS 1.3 KeyUpdate messages
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 29 Jan 2019 12:59:10 +0100
+
+haproxy (1.9.2-1) experimental; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: cache: fix confusion between zero and uninitialized cache
+                 key
+    - BUG/MEDIUM: checks: Avoid having an associated server for email
+                  checks.
+    - BUG/MEDIUM: connection: properly unregister the mux on failed
+                  initialization
+    - BUG/MEDIUM: h1: Get the h1m state when restarting the headers
+                  parsing
+    - BUG/MEDIUM: h1: Make sure we destroy an inactive connectin that did
+                  shutw.
+    - BUG/MEDIUM: init: Initialize idle_orphan_conns for first server in
+                  server-template
+    - BUG/MEDIUM: mux-h2: decode trailers in HEADERS frames
+    - BUG/MEDIUM: ssl: Disable anti-replay protection and set max data
+                  with 0RTT.
+    - BUG/MEDIUM: ssl: missing allocation failure checks loading tls key
+                  file
+    - BUG/MEDIUM: stats: Get the right scope pointer depending on HTX is
+                  used or not
+  * d/patches: removal of CVE-2018-20615.patch (applied upstream)
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 17 Jan 2019 19:19:27 +0100
+
+haproxy (1.9.0-2) experimental; urgency=medium
+
+  * Fix out-of-bounds read in HTTP2 mux (CVE-2018-20615).
+    Possible crash in H2 HEADERS frame decoder when the PRIORITY flag
+    is present, due to a missing frame size check.
+  * Bump Standards-Version to 4.3.0; no changes needed.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Thu, 03 Jan 2019 12:41:02 +0200
+
+haproxy (1.9.0-1) experimental; urgency=medium
+
+  * New upstream version 1.9.0.
+    See https://www.haproxy.com/blog/haproxy-1-9-has-arrived/.
+  * d/watch: update to follow 1.9.
+  * d/gbp.conf: update for 1.9 and experimental.
+  * d/rules: do not override CFLAGS, hijack DEBUG_CFLAGS for this instead.
+  * d/patches: add regression fix for DNS.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 21 Dec 2018 11:13:41 +0100
+
+haproxy (1.8.15-1) unstable; urgency=high
+
+  [ Vincent Bernat ]
+  * d/rules: switch to pcre2. Closes: #911933.
+
+  [ Apollon Oikonomopoulos ]
+  * New upstream version 1.8.15
+    - BUG: dns: Fix off-by-one write in dns_validate_dns_response() (
+    - BUG: dns: Fix out-of-bounds read via signedness error in
+      dns_validate_dns_response()
+    - BUG: dns: Prevent out-of-bounds read in dns_read_name()
+    - BUG: dns: Prevent out-of-bounds read in dns_validate_dns_response()
+      (CVE-2018-20102, closes: #916308)
+    - BUG: dns: Prevent stack-exhaustion via recursion loop in dns_read_name
+      (CVE-2018-20103, closes: #916307)
+    - BUG/MAJOR: http: http_txn_get_path() may deference an inexisting buffer
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Fri, 14 Dec 2018 15:31:04 +0200
+
+haproxy (1.8.14-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/CRITICAL: hpack: fix improper sign check on the header index
+                    value (already fixed in 1.8.13-2)
+    - BUG/MAJOR: kqueue: Don't reset the changes number by accident.
+    - BUG/MAJOR: thread: lua: Wrong SSL context initialization.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 23 Sep 2018 12:25:03 +0200
+
+haproxy (1.8.13-2) unstable; urgency=high
+
+  * Fix improper sign check on the HPACK header index value (CVE-2018-14645)
+  * Bump Standards-Version to 4.2.1; no changes needed
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 19 Sep 2018 22:46:58 +0300
+
+haproxy (1.8.13-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MEDIUM: h2: don't accept new streams if conn_streams are still
+                  in excess
+    - BUG/MEDIUM: h2: make sure the last stream closes the connection
+                  after a timeout
+    - BUG/MEDIUM: h2: never leave pending data in the output buffer on close
+    - BUG/MEDIUM: h2: prevent orphaned streams from blocking a connection
+                  forever
+    - BUG/MEDIUM: stats: don't ask for more data as long as we're responding
+    - BUG/MEDIUM: stream-int: don't immediately enable reading when the
+                  buffer was reportedly full
+    - BUG/MEDIUM: threads/sync: use sched_yield when available
+    - BUG/MEDIUM: threads: Fix the exit condition of the thread barrier
+    - BUG/MEDIUM: threads: properly fix nbthreads == MAX_THREADS
+    - BUG/MEDIUM: threads: unbreak "bind" referencing an incorrect thread
+                  number
+  * d/patches: drop systemd exit status patch (applied upstream).
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 01 Aug 2018 11:36:20 +0200
+
+haproxy (1.8.12-1) unstable; urgency=medium
+
+  * New upstream version.
+     - BUG/MAJOR: stick_table: Complete incomplete SEGV fix
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 27 Jun 2018 20:05:50 +0200
+
+haproxy (1.8.11-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: Stick-tables crash with segfault when the key is not in
+                 the stick-table
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 26 Jun 2018 18:26:05 +0200
+
+haproxy (1.8.10-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: lua: Dead lock with sockets
+    - BUG/MAJOR: map: fix a segfault when using http-request set-map
+    - BUG/MAJOR: ssl: OpenSSL context is stored in non-reserved memory slot
+    - BUG/MAJOR: ssl: Random crash with cipherlist capture
+    - BUG/MEDIUM: cache: don't cache when an Authorization header is present
+    - BUG/MEDIUM: dns: Delay the attempt to run a DNS resolution on check
+                  failure.
+    - BUG/MEDIUM: fd: Don't modify the update_mask in fd_dodelete().
+    - BUG/MEDIUM: fd: Only check update_mask against all_threads_mask.
+    - BUG/MEDIUM: servers: Add srv_addr default placeholder to the state file
+    - BUG/MEDIUM: stick-tables: Decrement ref_cnt in table_* converters
+    - BUG/MEDIUM: threads: Use the sync point to check active jobs and exit
+    - BUG/MEDIUM: threads: handle signal queue only in thread 0
+  * Remove patch from CVE. Included upstream.
+  * d/patches: add a patch for clean stop with systemd.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 22 Jun 2018 20:21:37 +0200
+
+haproxy (1.8.9-2) unstable; urgency=high
+
+  * d/patches: fix CVE-2018-11469: do not cache when an Authorization
+    header is present. Closes: #900084.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 26 May 2018 16:05:07 +0200
+
+haproxy (1.8.9-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: channel: Fix crash when trying to read from a closed socket
+    - BUG/MEDIUM: h2: implement missing support for chunked encoded uploads
+    - BUG/MEDIUM: http: don't always abort transfers on CF_SHUTR
+    - BUG/MEDIUM: lua: Fix segmentation fault if a Lua task exits
+    - BUG/MEDIUM: pollers: Use a global list for fd shared between threads
+    - BUG/MEDIUM: ssl: properly protect SSL cert generation
+    - BUG/MEDIUM: task: Don't free a task that is about to be run
+    - BUG/MEDIUM: threads: Fix the sync point for more than 32 threads
+  * d/rsyslog.conf: use modern syntax and statements, thanks to Guillem
+    Jover. Closes: #897914.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 19 May 2018 15:00:17 +0200
+
+haproxy (1.8.8-1) unstable; urgency=high
+
+  * New upstream version.
+    - BUG/CRITICAL: h2: fix incorrect frame length check
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 19 Apr 2018 17:51:55 +0200
+
+haproxy (1.8.7-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: cache: always initialize newly created objects
+  * d/control: switch maintainer address to tracker.debian.org.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 07 Apr 2018 07:58:34 +0200
+
+haproxy (1.8.6-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: cache: fix random crashes caused by incorrect delete() on
+                 non-first blocks
+    - BUG/MAJOR: h2: remove orphaned streams from the send list before closing
+    - BUG/MEDIUM: h2/threads: never release the task outside of the task
+                  handler
+    - BUG/MEDIUM: h2: always add a stream to the send or fctl list when
+                  blocked
+    - BUG/MEDIUM: h2: don't consider pending data on detach if connection
+                  is in error
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 05 Apr 2018 21:08:12 +0200
+
+haproxy (1.8.5-1) unstable; urgency=medium
+
+  * New upstream version.
+    - BUG/MAJOR: threads/queue: Fix thread-safety issues on the queues
+                 management
+    - BUG/MEDIUM: buffer: Fix the wrapping case in bi_putblk
+    - BUG/MEDIUM: buffer: Fix the wrapping case in bo_putblk
+    - BUG/MEDIUM: fix a 100% cpu usage with cpu-map and nbthread/nbproc
+    - BUG/MEDIUM: h2: also arm the h2 timeout when sending
+    - BUG/MEDIUM: h2: always consume any trailing data after end of output
+                  buffers
+    - BUG/MEDIUM: h2: properly account for DATA padding in flow control
+    - BUG/MEDIUM: http: Switch the HTTP response in tunnel mode as earlier
+                  as possible
+    - BUG/MEDIUM: spoe: Remove idle applets from idle list when HAProxy is
+                  stopping
+    - BUG/MEDIUM: ssl/sample: ssl_bc_* fetch keywords are broken.
+    - BUG/MEDIUM: ssl: Don't always treat SSL_ERROR_SYSCALL as
+                  unrecovarable.
+    - BUG/MEDIUM: ssl: Shutdown the connection for reading on
+                  SSL_ERROR_SYSCALL
+    - BUG/MEDIUM: tcp-check: single connect rule can't detect DOWN servers
+    - BUG/MEDIUM: threads/queue: wake up other threads upon dequeue
+    - BUG/MEDIUM: threads/unix: Fix a deadlock when a listener is
+                  temporarily disabled
+  * Upload to unstable.
+  * d/control: update Vcs-* fields to salsa.debian.org.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 25 Mar 2018 11:31:25 +0200
+
+haproxy (1.8.4-1) experimental; urgency=medium
+
+  * New upstream stable release.
+  * d/patches: document why dconv patch is not in series.
+  * d/docs: ship NOTICE file in haproxy-doc.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 10 Feb 2018 08:43:36 +0100
+
+haproxy (1.8.3-1) experimental; urgency=medium
+
+  * New upstream stable release.
+  * Change default configuration of stats socket to support hitless
+    reload.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 02 Jan 2018 18:48:24 +0100
+
+haproxy (1.8.2-1) experimental; urgency=medium
+
+  * New upstream stable release
+  * Refresh patches
+  * Bump Standards-Version to 4.1.2; no changes needed
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Sun, 24 Dec 2017 14:28:28 +0200
+
+haproxy (1.8.1-1) experimental; urgency=medium
+
+  * New upstream stable release.
+  * Enable PCRE JIT.
+  * systemd: replace Wants/After=syslog.service with After=rsyslog.service
+    (Closes: #882610)
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Sun, 03 Dec 2017 23:59:03 +0200
+
+haproxy (1.8.0-2) experimental; urgency=medium
+
+  * Use libatomic on platforms without 64-bit atomics. Fixes FTBFS on armel,
+    mips, mipsel, powerpc, powerpcspe, sh4 and m68k.
+  * d/rules: use variables defined in architecture.mk and buildflags.mk
+  * d/rules: drop unreachable else case.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 29 Nov 2017 01:21:40 +0200
+
+haproxy (1.8.0-1) experimental; urgency=medium
+
+  * New upstream stable series. Notable new features include:
+    + HTTP/2 support
+    + Support for multiple worker threads to allow scalability across CPUs
+      (e.g. for SSL termination)
+    + Seamless reloads
+    + HTTP small object caching
+    + Dynamic backend server configuration
+    See https://www.haproxy.com/blog/whats-new-haproxy-1-8/ and
+    https://www.mail-archive.com/haproxy@formilux.org/msg28004.html for more
+    detailed descriptions of the new features.
+  * Upload to experimental
+  * Refresh all patches.
+  * d/watch: switch to the 1.8.x upstream stable series
+  * Bump Standards to 4.1.1
+    + Switch haproxy-doc to Priority: optional from extra.
+  * Bump compat to 10:
+    + B-D on debhelper (>= 10)
+    + Drop explicit dh-systemd dependency and invocation
+    + Replace --no-restart-on-upgrade with --no-restart-after-upgrade
+      --no-stop-on-upgrade to make up for DH 10 defaults.
+  * B-D on libsystemd-dev and enable sd_notify() support on Linux.
+  * B-D on python3-sphinx instead of python-sphinx.
+  * d/rules: do not call dpkg-parsechangelog directly.
+  * d/copyright: drop obsolete section.
+  * Drop obsolete lintian overrides.
+  * Do a full-service restart when upgrading from pre-1.8 versions and running
+    under systemd, to migrate to the new process model and service type.
+    + Document this in d/NEWS as well.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 28 Nov 2017 22:25:11 +0200
+
+haproxy (1.7.10-1) unstable; urgency=medium
+
+  * New upstream version release (see CHANGELOG):
+    - BUG/MAJOR: stream-int: don't re-arm recv if send fails
+    - BUG/MAJOR: stream: ensure analysers are always called upon close
+    - BUG/MEDIUM: compression: Fix check on txn in smp_fetch_res_comp_algo
+    - BUG/MEDIUM: connection: remove useless flag CO_FL_DATA_RD_SH
+    - BUG/MEDIUM: deinit: correctly deinitialize the proxy and global
+                  listener tasks
+    - BUG/MEDIUM: deviceatlas: ignore not valuable HTTP request data
+    - BUG/MEDIUM: epoll: ensure we always consider HUP and ERR
+    - BUG/MEDIUM: http: Close streams for connections closed before a
+                  redirect
+    - BUG/MEDIUM: http: Fix a regression bug when a HTTP response is in
+                  TUNNEL mode
+    - BUG/MEDIUM: http: Return an error when url_dec sample converter
+                  failed
+    - BUG/MEDIUM: http: don't automatically forward request close
+    - BUG/MEDIUM: http: don't disable lingering on requests with tunnelled
+                  responses
+    - BUG/MEDIUM: kqueue: Don't bother closing the kqueue after fork.
+    - BUG/MEDIUM: lua: HTTP services must take care of body-less status
+                  codes
+    - BUG/MEDIUM: lua: fix crash when using bogus mode in
+                  register_service()
+    - BUG/MEDIUM: peers: set NOLINGER on the outgoing stream interface
+    - BUG/MEDIUM: prevent buffers being overwritten during build_logline()
+                  execution
+    - BUG/MEDIUM: ssl: fix OCSP expiry calculation
+    - BUG/MEDIUM: stream: don't ignore res.analyse_exp anymore
+    - BUG/MEDIUM: stream: properly set the required HTTP analysers on
+                  use-service
+    - BUG/MEDIUM: tcp-check: don't call tcpcheck_main() from the I/O
+                  handlers!
+    - BUG/MEDIUM: tcp-check: properly indicate polling state before
+                  performing I/O
+    - BUG/MEDIUM: tcp/http: set-dst-port action broken
+  * Fix VERDATE build argument to really use changelog date.
+  * Bump compat to 10.
+  * d/control: B-D on python3-sphinx instead of python-sphinx.
+  * d/control: make haproxy-doc Priority: optional.
+  * d/rules: enable PCRE JIT.
+  * d/rules: use variables defined in *.mk.
+  * d/patches: refresh and replace Wants/After=syslog.service with
+    After=rsyslog.service. Closes: #882610.
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 03 Jan 2018 08:29:48 +0100
+
+haproxy (1.7.9-1) unstable; urgency=medium
+
+  * New upstream version release (see CHANGELOG):
+    - BUG/MAJOR: lua/socket: resources not destroyed when the socket is
+                 aborted
+    - BUG/MEDIUM: lua: bad memory access
+    - BUG/MEDIUM: http: Switch HTTP responses in TUNNEL mode when body
+                  length is undefined
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 19 Aug 2017 12:05:02 +0200
+
+haproxy (1.7.8-1) unstable; urgency=medium
+
+  * New upstream version release (see CHANGELOG):
+    - BUG/MAJOR: cli: fix custom io_release was crushed by NULL.
+    - BUG/MAJOR: compression: Be sure to release the compression state in
+                 all cases
+    - BUG/MAJOR: map: fix segfault during 'show map/acl' on cli.
+    - BUG/MEDIUM: filters: Be sure to call flt_end_analyze for both
+                  channels
+    - BUG/MEDIUM: map/acl: fix unwanted flags inheritance.
+  * Bump Standards-Version to 4.0.0. No changes needed.
+  * Update d/watch to use https.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 08 Jul 2017 08:24:35 +0200
+
+haproxy (1.7.7-1) unstable; urgency=medium
+
+  * New upstream version release (see CHANGELOG):
+    - BUG/MEDIUM: http: Drop the connection establishment when a redirect
+                  is performed
+    - BUG/MEDIUM: cfgparse: Check if tune.http.maxhdr is in the range
+                  1..32767
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 26 Jun 2017 14:06:48 +0200
+
+haproxy (1.7.6-1) unstable; urgency=medium
+
+  * New upstream version release (see CHANGELOG):
+    - BUG/MAJOR: Use -fwrapv.
+    - BUG/MAJOR: http: call manage_client_side_cookies() before erasing
+                 the buffer
+    - BUG/MAJOR: server: Segfault after parsing server state file.
+    - BUG/MEDIUM: acl: don't free unresolved args in prune_acl_expr()
+    - BUG/MEDIUM: acl: proprely release unused args in prune_acl_expr()
+    - BUG/MEDIUM: arg: ensure that we properly unlink unresolved arguments
+                  on error
+    - BUG/MEDIUM: lua: memory leak
+    - BUG/MEDIUM: lua: segfault if a converter or a sample doesn't return
+                  anything
+    - BUG/MEDIUM: peers: Peers CLOSE_WAIT issue.
+    - BUG/MEDIUM: unix: never unlink a unix socket from the file system
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 18 Jun 2017 12:34:40 +0200
+
+haproxy (1.7.5-2) unstable; urgency=medium
+
+  * Enable getaddrinfo() support, allowing resolution of hostnames to IPv6
+    addresses (Closes: #862780). Thanks to Anton Eliasson
+    <devel@antoneliasson.se>!
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 17 May 2017 13:01:45 +0300
+
+haproxy (1.7.5-1) unstable; urgency=medium
+
+  * New upstream version release (see CHANGELOG):
+    - BUG/MEDIUM: peers: fix buffer overflow control in intdecode.
+    - BUG/MEDIUM: buffers: Fix how input/output data are injected into buffers
+    - BUG/MEDIUM: http: Fix blocked HTTP/1.0 responses when compression is
+      enabled
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 04 Apr 2017 14:25:38 +0300
+
+haproxy (1.7.4-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    - BUG/MAJOR: connection: update CO_FL_CONNECTED before calling the
+                 data layer
+    - BUG/MAJOR: http: fix typo in http_apply_redirect_rule
+    - BUG/MAJOR: stream-int: do not depend on connection flags to detect
+                 connection
+    - BUG/MEDIUM: cli: Prevent double free in CLI ACL lookup
+    - BUG/MEDIUM: connection: ensure to always report the end of handshakes
+    - BUG/MEDIUM: listener: do not try to rebind another process' socket
+    - BUG/MEDIUM: stream: fix client-fin/server-fin handling
+    - BUG/MEDIUM: tcp: don't require privileges to bind to device
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 31 Mar 2017 11:01:14 +0200
+
+haproxy (1.7.3-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    - BUG/MAJOR: lua segmentation fault when the request is like 'GET
+                 ?arg=val HTTP/1.1'
+    - BUG/MAJOR: dns: restart sockets after fork()
+    - BUG/MEDIUM: tcp: don't poll for write when connect() succeeds
+    - BUG/MEDIUM: http: prevent redirect from overwriting a buffer
+    - BUG/MEDIUM: filters: Do not truncate HTTP response when body length
+                  is undefined
+    - BUG/MEDIUM: http: Prevent replace-header from overwriting a buffer
+    - BUG/MEDIUM: config: reject anything but "if" or "unless" after a
+                  use-backend rule
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 01 Mar 2017 20:03:12 +0100
+
+haproxy (1.7.2-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + Fix a regression whereby fragmented requests were randomly flagged as
+      bad requests depending on previous buffer contents; this was noticeable
+      under low load with authenticated requests.
+    + Fix dynamic address resolution for IPv6-only hosts.
+    + Make sure SSL sessions are not reused when the SNI changes. This makes
+      SNI and SSL health checks play nice together.
+    + Minor improvements:
+      - Add the ability to perform actions on multiple servers via the stats
+        page.
+      - Add the ability to specify a custom HTTP reason field in generated
+        responses.
+      - New sample fetch function, `fc_rcvd_proxy', indicating whether the
+        PROXY protocol was used on the frontend for a connection or not.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Fri, 13 Jan 2017 14:49:05 +0200
+
+haproxy (1.7.1-1) unstable; urgency=medium
+
+  * New upstream stable release.
+  * Upload to unstable.
+  * Notable new features since 1.6:
+    + SPOE (stream processing offload engine) : ability to delegate some
+      slow, unreliable or dangerous processing to external processes.
+    + More statistics in the CSV output.
+    + Support of directories for config files: if the argument to -f
+      is a directory, all files found there are loaded in alphabetical order.
+    + It is now possible to set/unset/preset environment variables directly in
+      the global section and query them through the CLI.
+    + The CLI makes it possible to change a server's address, port, maxconn,
+      check address and port at runtime, without reloading haproxy.
+    + Support for multiple certificates: different certificates for the same
+      domain so that the best one can be picked according to browser support.
+      The main use is to be able to deliver ECDSA certificates to clients
+      supporting them, without breaking compatibility with older clients.
+    + SO_REUSEPORT is now configurable and can be disabled.
+    + Updates to the Lua API, including new classes to access many internal
+      objects like listeners, servers, proxies etc.
+    + Support for a new type of maps consisting of regular expressions with
+      replacement values.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 13 Dec 2016 12:32:32 +0200
+
+haproxy (1.7.0-1) experimental; urgency=medium
+
+  * New upstream stable series.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Fri, 25 Nov 2016 18:00:55 +0200
+
+haproxy (1.7~dev6-1) experimental; urgency=medium
+
+  * New upstream development release (Closes: #828337)
+  * Upload to experimental
+  * d/watch: look for 1.7
+  * B-D on zlib1g-dev
+  * haproxy: Depend on lsb-base for the initscript
+  * Ship additional plain-text documentation
+  * haproxy-doc: ship HTML version of management.txt
+  * Update the default SSL cipher list and add a link to Mozilla's SSL
+    configuration generator (Closes: #840735)
+  * d/rules: use SUBVERS to pass the Debian revision to HAPROXY_VERSION
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Thu, 10 Nov 2016 16:02:27 +0200
+
+haproxy (1.6.10-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + Fix retransmits in proxy mode and rare cases of unkillable tasks.
+    + systemd wrapper: do not leave old processes behind when reloading too
+      fast.
+    + systemd wrapper: correctly set the status code.
+    + Fix two bugs in the peers' task management possibly causing some
+      CLOSE_WAIT connection after some rare race conditions.
+    + Make SO_REUSEPORT use configurable via the "-dR" command line switch
+      or the "noreuseport" config option in the global section.
+  * B-D on libssl1.0-dev (Closes: #828337); upstream does not currently
+    support OpenSSL 1.1 for the 1.6 series.
+  * haproxy: depend on lsb-base for the initscript's use of
+    /lib/lsb/init-functions.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Mon, 21 Nov 2016 11:46:16 +0200
+
+haproxy (1.6.9-2) unstable; urgency=medium
+
+  * Enable Linux namespace support.
+  * Pass the full Debian version and package release date from d/changelog to
+    the build system.
+  * initscript: reorder the reload command arguments to always parse EXTRAOPTS
+    properly.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 28 Sep 2016 10:45:43 +0300
+
+haproxy (1.6.9-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + BUG/MAJOR: stream: properly mark the server address as unset on
+      connect retry
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 31 Aug 2016 07:44:27 +0200
+
+haproxy (1.6.8-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + BUG/MAJOR: compression: initialize avail_in/next_in even during
+      flush
+    + BUG/MAJOR: server: the "sni" directive could randomly cause trouble
+    + BUG/MAJOR: stick-counters: possible crash when using sc_trackers
+      with wrong table
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 14 Aug 2016 14:17:08 +0200
+
+haproxy (1.6.7-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + BUG/MAJOR: fix use-after-free crash on start
+    + BUG/MEDIUM: dns: fix alignment issues in the DNS response parser
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 14 Jul 2016 08:29:43 +0200
+
+haproxy (1.6.6-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + BUG/MAJOR: fix listening IP address storage for frontends
+    + BUG/MAJOR: http: fix breakage of "reqdeny" causing random crashes
+    + BUG/MEDIUM: stick-tables: fix breakage in table converters
+    + BUG/MEDIUM: dns: unbreak DNS resolver after header fix
+    + BUG/MEDIUM: stats: show servers state may show an servers from another
+      backend
+    + BUG/MEDIUM: fix risk of segfault with "show tls-keys"
+    + BUG/MEDIUM: sticktables: segfault in some configuration error cases
+    + BUG/MEDIUM: lua: converters doesn't work
+    + BUG/MEDIUM: http: add-header: buffer overwritten
+    + BUG/MEDIUM: external-checks: close all FDs right after the fork()
+    + BUG/MAJOR: external-checks: use asynchronous signal delivery
+  * Drop haproxy.service-check-config-before-reload.patch. Applied
+    upstream.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 28 Jun 2016 10:13:33 +0200
+
+haproxy (1.6.5-2) unstable; urgency=high
+
+  * Add a patch to fix CVE-2016-5360. Closes: #826869.
+    + BUG/MAJOR: http: fix breakage of "reqdeny" causing random crashes
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 11 Jun 2016 22:23:50 +0200
+
+haproxy (1.6.5-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + BUG/MAJOR: channel: fix miscalculation of available buffer space
+    + BUG/MAJOR: Fix crash in http_get_fhdr with exactly MAX_HDR_HISTORY
+      headers
+    + BUG/MEDIUM: channel: don't allow to overwrite the reserve until
+      connected
+    + BUG/MEDIUM: channel: fix inconsistent handling of 4GB-1 transfers
+    + BUG/MEDIUM: channel: incorrect polling condition may delay event
+      delivery
+    + BUG/MEDIUM: dns: fix alignment issue when building DNS queries
+    + BUG/MEDIUM: fix maxaccept computation on per-process listeners
+    + BUG/MEDIUM: Fix RFC5077 resumption when more than TLS_TICKETS_NO are
+      present
+    + BUG/MEDIUM: http: fix risk of CPU spikes with pipelined requests from
+      dead client
+    + BUG/MEDIUM: log: fix risk of segfault when logging HTTP fields in TCP
+      mode
+    + BUG/MEDIUM: lua: protects the upper boundary of the argument list for
+      converters/fetches.
+    + BUG/MEDIUM: peers: fix incorrect age in frequency counters
+    + BUG/MEDIUM: sample: initialize the pointer before parse_binary call.
+    + BUG/MEDIUM: stats: show backend may show an empty or incomplete result
+    + BUG/MEDIUM: stats: show servers state may show an empty or incomplete
+      result
+    + BUG/MEDIUM: stick-tables: some sample-fetch doesn't work in the
+      connection state.
+    + BUG/MEDIUM: stream: ensure the SI_FL_DONT_WAKE flag is properly cleared
+    + BUG/MEDIUM: trace.c: rdtsc() is defined in two files
+    + MEDIUM: unblock signals on startup.
+  * Bump standards to 3.9.8; no changes needed.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 11 May 2016 11:07:24 +0300
+
+haproxy (1.6.4-3) unstable; urgency=medium
+
+  * d/init: remove support for dynamic script name. This enables haproxy to
+    be started on boot.
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 24 Mar 2016 20:36:08 +0100
+
+haproxy (1.6.4-2) unstable; urgency=medium
+
+  * d/init: fix SysV init script w/ respect to handling EXTRAOPTS on check.
+  * d/control: add Pre-Depends for dpkg-maintscript-helper support of
+    dir_to_symlink.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 19 Mar 2016 16:35:20 +0100
+
+haproxy (1.6.4-1) unstable; urgency=medium
+
+  * New upstream release (see CHANGELOG):
+    + BUG/MAJOR: http-reuse: fix risk of orphaned connections.
+    + BUG/MAJOR: lua: applets can't sleep.
+    + BUG/MAJOR: samples: check smp->strm before using it. 
+    + BUG/MAJOR: servers state: server port is erased when dns resolution is
+      enabled on a server.
+    + BUG/MAJOR: vars: always retrieve the stream and session from the sample
+    + BUG/MEDIUM: buffers: do not round up buffer size during allocation
+    + BUG/MEDIUM: dns: no DNS resolution happens if no ports provided to the
+      nameserver
+    + BUG/MEDIUM: servers state: server port is used uninitialized
+    + BUG/MEDIUM: config: Adding validation to stick-table expire value.
+    + BUG/MEDIUM: sample: http_date() doesn't provide the right day of the
+      week
+    + BUG/MEDIUM: channel: fix miscalculation of available buffer space.
+    + BUG/MEDIUM: http-reuse: do not share private connections across backends
+    + BUG/MEDIUM: ssl: fix off-by-one in ALPN list allocation
+    + BUG/MEDIUM: ssl: fix off-by-one in NPN list allocation
+    + BUG/MEDIUM: stats: stats bind-process doesn't propagate the process mask
+      correctly
+    + BUG/MEDIUM: chunks: always reject negative-length chunks
+    + BUG/MEDIUM: cfgparse: wrong argument offset after parsing server "sni"
+      keyword
+
+  [ Vincent Bernat ]
+  * haproxy.init: append ${EXTRAOPTS} when verifying configuration file.
+  * haproxy.init: move EXTRAOPTS after all other parameters.
+  * haproxy.init: management of multiple HAProxy instances with SysV
+    init.d script, courtesy of Ivan Savcic.
+
+  [ Apollon Oikonomopoulos ]
+  * Bump standards to 3.9.7:
+    + haproxy-doc: move the additional documentation from
+      /usr/share/doc/haproxy-doc to /usr/share/doc/haproxy, as per the
+      recommendation in Policy §12.3.
+    + Add compatibility symlinks from /usr/share/doc/haproxy-doc to
+      /usr/share/doc/haproxy.
+  * Enable all hardening flags.
+  * d/control: use HTTPS for Vcs-*
+  * Use www.haproxy.org as the project's homepage in d/control and
+    d/copyright.
+  * d/copyright: adjust debian/* years.
+  * Add basic DEP-8 tests.
+  * Drop the haproxy-dbg binary package in favor of ddebs.
+  * haproxy-doc:
+    + Use dpkg-maintscript-helper dir_to_symlink for the compatibility
+      symlinks.
+    + Add Lua documentation doc-base entry.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 15 Mar 2016 21:04:11 +0200
+
+haproxy (1.6.3-1) unstable; urgency=medium
+
+  [ Apollon Oikonomopoulos ]
+  * haproxy.init: use s-s-d's --pidfile option.
+    Thanks to Louis Bouchard (Closes: #804530)
+
+  [ Vincent Bernat ]
+  * watch: fix d/watch to look for 1.6 version
+  * Imported Upstream version 1.6.3
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 31 Dec 2015 08:10:10 +0100
+
+haproxy (1.6.2-2) unstable; urgency=medium
+
+  * Enable USE_REGPARM on amd64 as well.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 03 Nov 2015 21:21:30 +0100
+
+haproxy (1.6.2-1) unstable; urgency=medium
+
+  * New upstream release.
+    - BUG/MAJOR: dns: first DNS response packet not matching queried
+                      hostname may lead to a loop
+    - BUG/MAJOR: http: don't requeue an idle connection that is already
+                       queued
+  * Upload to unstable.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 03 Nov 2015 13:36:22 +0100
+
+haproxy (1.6.1-2) experimental; urgency=medium
+
+  * Build the Lua manpage in -arch, fixes FTBFS in binary-only builds.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Thu, 22 Oct 2015 12:19:41 +0300
+
+haproxy (1.6.1-1) experimental; urgency=medium
+
+  [ Vincent Bernat ]
+  * New upstream release.
+    - BUG/MAJOR: ssl: free the generated SSL_CTX if the LRU cache is
+                      disabled
+  * Drop 0001-BUILD-install-only-relevant-and-existing-documentati.patch.
+
+  [ Apollon Oikonomopoulos ]
+  * Ship and generate Lua API documentation.
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 22 Oct 2015 10:45:55 +0200
+
+haproxy (1.6.0+ds1-1) experimental; urgency=medium
+
+  * New upstream release!
+  * Add a patch to fix documentation installation:
+    + 0001-BUILD-install-only-relevant-and-existing-documentati.patch
+  * Update HAProxy documentation converter to a more recent version.
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 14 Oct 2015 17:29:19 +0200
+
+haproxy (1.6~dev7-1) experimental; urgency=medium
+
+  * New upstream release.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 06 Oct 2015 16:01:26 +0200
+
+haproxy (1.6~dev5-1) experimental; urgency=medium
+
+  * New upstream release.
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 14 Sep 2015 15:50:28 +0200
+
+haproxy (1.6~dev4-1) experimental; urgency=medium
+
+  * New upstream release.
+  * Refresh debian/copyright.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 30 Aug 2015 23:54:10 +0200
+
+haproxy (1.6~dev3-1) experimental; urgency=medium
+
+  * New upstream release.
+  * Enable Lua support.
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 15 Aug 2015 17:51:29 +0200
+
+haproxy (1.5.15-1) unstable; urgency=medium
+
+  * New upstream stable release including the following fix:
+    - BUG/MAJOR: http: don't call http_send_name_header() after an error
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 02 Nov 2015 07:34:19 +0100
+
+haproxy (1.5.14-1) unstable; urgency=high
+
+  * New upstream version. Fix an information leak (CVE-2015-3281):
+    - BUG/MAJOR: buffers: make the buffer_slow_realign() function
+                 respect output data.
+  * Add $named as a dependency for init script. Closes: #790638.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 03 Jul 2015 19:49:02 +0200
+
+haproxy (1.5.13-1) unstable; urgency=medium
+
+  * New upstream stable release including the following fixes:
+    - MAJOR: peers: allow peers section to be used with nbproc > 1
+    - BUG/MAJOR: checks: always check for end of list before proceeding
+    - MEDIUM: ssl: replace standards DH groups with custom ones
+    - BUG/MEDIUM: ssl: fix tune.ssl.default-dh-param value being overwritten
+    - BUG/MEDIUM: cfgparse: segfault when userlist is misused
+    - BUG/MEDIUM: stats: properly initialize the scope before dumping stats
+    - BUG/MEDIUM: http: don't forward client shutdown without NOLINGER
+                  except for tunnels
+    - BUG/MEDIUM: checks: do not dereference head of a tcp-check at the end
+    - BUG/MEDIUM: checks: do not dereference a list as a tcpcheck struct
+    - BUG/MEDIUM: peers: apply a random reconnection timeout
+    - BUG/MEDIUM: config: properly compute the default number of processes
+                  for a proxy
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 27 Jun 2015 20:52:07 +0200
+
+haproxy (1.5.12-1) unstable; urgency=medium
+
+  * New upstream stable release including the following fixes:
+    - BUG/MAJOR: http: don't read past buffer's end in http_replace_value
+    - BUG/MAJOR: http: prevent risk of reading past end with balance
+                 url_param
+    - BUG/MEDIUM: Do not consider an agent check as failed on L7 error
+    - BUG/MEDIUM: patern: some entries are not deleted with case
+                  insensitive match
+    - BUG/MEDIUM: buffer: one byte miss in buffer free space check
+    - BUG/MEDIUM: http: thefunction "(req|res)-replace-value" doesn't
+                  respect the HTTP syntax
+    - BUG/MEDIUM: peers: correctly configure the client timeout
+    - BUG/MEDIUM: http: hdr_cnt would not count any header when called
+                  without name
+    - BUG/MEDIUM: listener: don't report an error when resuming unbound
+                  listeners
+    - BUG/MEDIUM: init: don't limit cpu-map to the first 32 processes only
+    - BUG/MEDIUM: stream-int: always reset si->ops when si->end is
+                  nullified
+    - BUG/MEDIUM: http: remove content-length from chunked messages
+    - BUG/MEDIUM: http: do not restrict parsing of transfer-encoding to
+                  HTTP/1.1
+    - BUG/MEDIUM: http: incorrect transfer-coding in the request is a bad
+                  request
+    - BUG/MEDIUM: http: remove content-length form responses with bad
+                  transfer-encoding
+    - BUG/MEDIUM: http: wait for the exact amount of body bytes in
+                  wait_for_request_body
+
+ -- Vincent Bernat <bernat@debian.org>  Sat, 02 May 2015 16:38:28 +0200
+
+haproxy (1.5.11-2) unstable; urgency=medium
+
+  * Upload to unstable.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 26 Apr 2015 17:46:58 +0200
+
+haproxy (1.5.11-1) experimental; urgency=medium
+
+  * New upstream stable release including the following fixes:
+    - BUG/MAJOR: log: don't try to emit a log if no logger is set
+    - BUG/MEDIUM: backend: correctly detect the domain when
+                  use_domain_only is used
+    - BUG/MEDIUM: Do not set agent health to zero if server is disabled
+                  in config
+    - BUG/MEDIUM: Only explicitly report "DOWN (agent)" if the agent health
+                  is zero
+    - BUG/MEDIUM: http: fix header removal when previous header ends with
+                  pure LF
+    - BUG/MEDIUM: channel: fix possible integer overflow on reserved size
+                  computation
+    - BUG/MEDIUM: channel: don't schedule data in transit for leaving until
+                  connected
+    - BUG/MEDIUM: http: make http-request set-header compute the string
+                  before removal
+  * Upload to experimental.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 01 Feb 2015 09:22:27 +0100
+
+haproxy (1.5.10-1) experimental; urgency=medium
+
+  * New upstream stable release including the following fixes:
+      - BUG/MAJOR: stream-int: properly check the memory allocation return
+      - BUG/MEDIUM: sample: fix random number upper-bound
+      - BUG/MEDIUM: patterns: previous fix was incomplete
+      - BUG/MEDIUM: payload: ensure that a request channel is available
+      - BUG/MEDIUM: tcp-check: don't rely on random memory contents
+      - BUG/MEDIUM: tcp-checks: disable quick-ack unless next rule is an expect
+      - BUG/MEDIUM: config: do not propagate processes between stopped
+                    processes
+      - BUG/MEDIUM: memory: fix freeing logic in pool_gc2()
+      - BUG/MEDIUM: compression: correctly report zlib_mem
+  * Upload to experimental.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 04 Jan 2015 13:17:56 +0100
+
+haproxy (1.5.9-1) experimental; urgency=medium
+
+  * New upstream stable release including the following fixes:
+      - BUG/MAJOR: sessions: unlink session from list on out
+                   of memory
+      - BUG/MEDIUM: pattern: don't load more than once a pattern
+                    list.
+      - BUG/MEDIUM: connection: sanitize PPv2 header length before
+                    parsing address information
+      - BUG/MAJOR: frontend: initialize capture pointers earlier
+      - BUG/MEDIUM: checks: fix conflicts between agent checks and
+                    ssl healthchecks
+      - BUG/MEDIUM: ssl: force a full GC in case of memory shortage
+      - BUG/MEDIUM: ssl: fix bad ssl context init can cause
+                                 segfault in case of OOM.
+  * Upload to experimental.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 07 Dec 2014 16:37:36 +0100
+
+haproxy (1.5.8-3) unstable; urgency=medium
+
+  * Remove RC4 from the default cipher string shipped in configuration.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 27 Feb 2015 11:29:23 +0100
+
+haproxy (1.5.8-2) unstable; urgency=medium
+
+  * Cherry-pick the following patches from 1.5.9 release:
+      - 8a0b93bde77e BUG/MAJOR: sessions: unlink session from list on out
+                                of memory
+      - bae03eaad40a BUG/MEDIUM: pattern: don't load more than once a pattern
+                                 list.
+      - 93637b6e8503 BUG/MEDIUM: connection: sanitize PPv2 header length before
+                                 parsing address information
+      - 8ba50128832b BUG/MAJOR: frontend: initialize capture pointers earlier
+      - 1f96a87c4e14 BUG/MEDIUM: checks: fix conflicts between agent checks and
+                                 ssl healthchecks
+      - 9bcc01ae2598 BUG/MEDIUM: ssl: force a full GC in case of memory shortage
+      - 909514970089 BUG/MEDIUM: ssl: fix bad ssl context init can cause
+                                 segfault in case of OOM.
+  * Cherry-pick the following patches from future 1.5.10 release:
+      - 1e89acb6be9b BUG/MEDIUM: payload: ensure that a request channel is
+                                 available
+      - bad3c6f1b6d7 BUG/MEDIUM: patterns: previous fix was incomplete
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 07 Dec 2014 11:11:21 +0100
+
+haproxy (1.5.8-1) unstable; urgency=medium
+
+  * New upstream stable release including the following fixes:
+
+     + BUG/MAJOR: buffer: check the space left is enough or not when input
+                  data in a buffer is wrapped
+     + BUG/MINOR: ssl: correctly initialize ssl ctx for invalid certificates
+     + BUG/MEDIUM: tcp: don't use SO_ORIGINAL_DST on non-AF_INET sockets
+     + BUG/MEDIUM: regex: fix pcre_study error handling
+     + BUG/MEDIUM: tcp: fix outgoing polling based on proxy protocol
+     + BUG/MINOR: log: fix request flags when keep-alive is enabled
+     + BUG/MAJOR: cli: explicitly call cli_release_handler() upon error
+     + BUG/MEDIUM: http: don't dump debug headers on MSG_ERROR
+  * Also includes the following new features:
+     + MINOR: ssl: add statement to force some ssl options in global.
+     + MINOR: ssl: add fetchs 'ssl_c_der' and 'ssl_f_der' to return DER
+              formatted certs
+  * Disable SSLv3 in the default configuration file.
+
+ -- Vincent Bernat <bernat@debian.org>  Fri, 31 Oct 2014 13:48:19 +0100
+
+haproxy (1.5.6-1) unstable; urgency=medium
+
+  * New upstream stable release including the following fixes:
+    + BUG/MEDIUM: systemd: set KillMode to 'mixed'
+    + MINOR: systemd: Check configuration before start
+    + BUG/MEDIUM: config: avoid skipping disabled proxies
+    + BUG/MINOR: config: do not accept more track-sc than configured
+    + BUG/MEDIUM: backend: fix URI hash when a query string is present
+  * Drop systemd patches:
+    + haproxy.service-also-check-on-start.patch
+    + haproxy.service-set-killmode-to-mixed.patch
+  * Refresh other patches.
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 20 Oct 2014 18:10:21 +0200
+
+haproxy (1.5.5-1) unstable; urgency=medium
+
+  [ Vincent Bernat ]
+  * initscript: use start-stop-daemon to reliably terminate all haproxy
+    processes. Also treat stopping a non-running haproxy as success.
+    (Closes: #762608, LP: #1038139)
+
+  [ Apollon Oikonomopoulos ]
+  * New upstream stable release including the following fixes:
+    + DOC: Address issue where documentation is excluded due to a gitignore
+      rule.
+    + MEDIUM: Improve signal handling in systemd wrapper.
+    + BUG/MINOR: config: don't propagate process binding for dynamic
+      use_backend
+    + MINOR: Also accept SIGHUP/SIGTERM in systemd-wrapper
+    + DOC: clearly state that the "show sess" output format is not fixed
+    + MINOR: stats: fix minor typo fix in stats_dump_errors_to_buffer()
+    + DOC: indicate in the doc that track-sc* can wait if data are missing
+    + MEDIUM: http: enable header manipulation for 101 responses
+    + BUG/MEDIUM: config: propagate frontend to backend process binding again.
+    + MEDIUM: config: properly propagate process binding between proxies
+    + MEDIUM: config: make the frontends automatically bind to the listeners'
+      processes
+    + MEDIUM: config: compute the exact bind-process before listener's
+      maxaccept
+    + MEDIUM: config: only warn if stats are attached to multi-process bind
+      directives
+    + MEDIUM: config: report it when tcp-request rules are misplaced
+    + MINOR: config: detect the case where a tcp-request content rule has no
+      inspect-delay
+    + MEDIUM: systemd-wrapper: support multiple executable versions and names
+    + BUG/MEDIUM: remove debugging code from systemd-wrapper
+    + BUG/MEDIUM: http: adjust close mode when switching to backend
+    + BUG/MINOR: config: don't propagate process binding on fatal errors.
+    + BUG/MEDIUM: check: rule-less tcp-check must detect connect failures
+    + BUG/MINOR: tcp-check: report the correct failed step in the status
+    + DOC: indicate that weight zero is reported as DRAIN
+  * Add a new patch (haproxy.service-set-killmode-to-mixed.patch) to fix the
+    systemctl stop action conflicting with the systemd wrapper now catching
+    SIGTERM.
+  * Bump standards to 3.9.6; no changes needed.
+  * haproxy-doc: link to tracker.debian.org instead of packages.qa.debian.org.
+  * d/copyright: move debian/dconv/* paragraph after debian/*, so that it
+    actually matches the files it is supposed to.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 08 Oct 2014 12:34:53 +0300
+
+haproxy (1.5.4-1) unstable; urgency=high
+
+  * New upstream version.
+    + Fix a critical bug that, under certain unlikely conditions, allows a
+      client to crash haproxy.
+  * Prefix rsyslog configuration file to ensure to log only to
+    /var/log/haproxy. Thanks to Paul Bourke for the patch.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 02 Sep 2014 19:14:38 +0200
+
+haproxy (1.5.3-1) unstable; urgency=medium
+
+  * New upstream stable release, fixing the following issues:
+    + Memory corruption when building a proxy protocol v2 header
+    + Memory leak in SSL DHE key exchange
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Fri, 25 Jul 2014 10:41:36 +0300
+
+haproxy (1.5.2-1) unstable; urgency=medium
+
+  * New upstream stable release. Important fixes:
+    + A few sample fetch functions when combined in certain ways would return
+      malformed results, possibly crashing the HAProxy process.
+    + Hash-based load balancing and http-send-name-header would fail for
+      requests which contain a body which starts to be forwarded before the
+      data is used.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Mon, 14 Jul 2014 00:42:32 +0300
+
+haproxy (1.5.1-1) unstable; urgency=medium
+
+  * New upstream stable release:
+    + Fix a file descriptor leak for clients that disappear before connecting.
+    + Do not staple expired OCSP responses.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Tue, 24 Jun 2014 12:56:30 +0300
+
+haproxy (1.5.0-1) unstable; urgency=medium
+
+  * New upstream stable series. Notable changes since the 1.4 series:
+    + Native SSL support on both sides with SNI/NPN/ALPN and OCSP stapling.
+    + IPv6 and UNIX sockets are supported everywhere
+    + End-to-end HTTP keep-alive for better support of NTLM and improved
+      efficiency in static farms
+    + HTTP/1.1 response compression (deflate, gzip) to save bandwidth
+    + PROXY protocol versions 1 and 2 on both sides
+    + Data sampling on everything in request or response, including payload
+    + ACLs can use any matching method with any input sample
+    + Maps and dynamic ACLs updatable from the CLI
+    + Stick-tables support counters to track activity on any input sample
+    + Custom format for logs, unique-id, header rewriting, and redirects
+    + Improved health checks (SSL, scripted TCP, check agent, ...)
+    + Much more scalable configuration supports hundreds of thousands of
+      backends and certificates without sweating
+
+  * Upload to unstable, merge all 1.5 work from experimental. Most important
+    packaging changes since 1.4.25-1 include:
+    + systemd support.
+    + A more sane default config file.
+    + Zero-downtime upgrades between 1.5 releases by gracefully reloading
+      HAProxy during upgrades.
+    + HTML documentation shipped in the haproxy-doc package.
+    + kqueue support for kfreebsd.
+
+  * Packaging changes since 1.5~dev26-2:
+    + Drop patches merged upstream:
+      o Fix-reference-location-in-manpage.patch
+      o 0001-BUILD-stats-workaround-stupid-and-bogus-Werror-forma.patch
+    + d/watch: look for stable 1.5 releases
+    + systemd: respect CONFIG and EXTRAOPTS when specified in
+      /etc/default/haproxy.
+    + initscript: test the configuration before start or reload.
+    + initscript: remove the ENABLED flag and logic.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Fri, 20 Jun 2014 11:05:17 +0300
+
+haproxy (1.5~dev26-2) experimental; urgency=medium
+
+  * initscript: start should not fail when haproxy is already running
+    + Fixes upgrades from post-1.5~dev24-1 installations
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 04 Jun 2014 13:20:39 +0300
+
+haproxy (1.5~dev26-1) experimental; urgency=medium
+
+  * New upstream development version.
+     + Add a patch to fix compilation with -Werror=format-security
+
+ -- Vincent Bernat <bernat@debian.org>  Wed, 28 May 2014 20:32:10 +0200
+
+haproxy (1.5~dev25-1) experimental; urgency=medium
+
+  [ Vincent Bernat ]
+  * New upstream development version.
+  * Rename "contimeout", "clitimeout" and "srvtimeout" in the default
+    configuration file to "timeout connection", "timeout client" and
+    "timeout server".
+
+  [ Apollon Oikonomopoulos ]
+  * Build on kfreebsd using the "freebsd" target; enables kqueue support.
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 15 May 2014 00:20:11 +0200
+
+haproxy (1.5~dev24-2) experimental; urgency=medium
+
+  * New binary package: haproxy-doc
+    + Contains the HTML documentation built using a version of Cyril Bonté's
+      haproxy-dconv (https://github.com/cbonte/haproxy-dconv).
+    + Add Build-Depends-Indep on python and python-mako
+    + haproxy Suggests: haproxy-doc
+  * systemd: check config file for validity on reload.
+  * haproxy.cfg:
+    + Enable the stats socket by default and bind it to
+      /run/haproxy/admin.sock, which is accessible by the haproxy group.
+      /run/haproxy creation is handled by the initscript for sysv-rc and a
+      tmpfiles.d config for systemd.
+    + Set the default locations for CA and server certificates to
+      /etc/ssl/certs and /etc/ssl/private respectively.
+    + Set the default cipher list to be used on listening SSL sockets to
+      enable PFS, preferring ECDHE ciphers by default.
+  * Gracefully reload HAProxy on upgrade instead of performing a full restart.
+  * debian/rules: split build into binary-arch and binary-indep.
+  * Build-depend on debhelper >= 9, set compat to 9.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Sun, 27 Apr 2014 13:37:17 +0300
+
+haproxy (1.5~dev24-1) experimental; urgency=medium
+
+  * New upstream development version, fixes major regressions introduced in
+    1.5~dev23:
+
+    + Forwarding of a message body (request or response) would automatically
+      stop after the transfer timeout strikes, and with no error.
+    + Redirects failed to update the msg->next offset after consuming the
+      request, so if they were made with keep-alive enabled and starting with
+      a slash (relative location), then the buffer was shifted by a negative
+      amount of data, causing a crash.
+    + The code to standardize DH parameters caused an important performance
+      regression, so it was temporarily reverted for the time needed to
+      understand the cause and to fix it.
+
+    For a complete release announcement, including other bugfixes and feature
+    enhancements, see http://deb.li/yBVA.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Sun, 27 Apr 2014 11:09:37 +0300
+
+haproxy (1.5~dev23-1) experimental; urgency=medium
+
+  * New upstream development version; notable changes since 1.5~dev22:
+    + SSL record size optimizations to speed up both small and large
+      transfers.
+    + Dynamic backend name support in use_backend.
+    + Compressed chunked transfer encoding support.
+    + Dynamic ACL manipulation via the CLI.
+    + New "language" converter for extracting language preferences from
+      Accept-Language headers.
+  * Remove halog source and systemd unit files from
+    /usr/share/doc/haproxy/contrib, they are built and shipped in their
+    appropriate locations since 1.5~dev19-2.
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Wed, 23 Apr 2014 11:12:34 +0300
+
+haproxy (1.5~dev22-1) experimental; urgency=medium
+
+  * New upstream development version
+  * watch: use the source page and not the main one
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Mon, 03 Feb 2014 17:45:51 +0200
+
+haproxy (1.5~dev21+20140118-1) experimental; urgency=medium
+
+  * New upstream development snapshot, with the following fixes since
+    1.5-dev21:
+     + 00b0fb9 BUG/MAJOR: ssl: fix breakage caused by recent fix abf08d9
+     + 410f810 BUG/MEDIUM: map: segmentation fault with the stats's socket
+                           command "set map ..."
+     + abf08d9 BUG/MAJOR: connection: fix mismatch between rcv_buf's API and
+                          usage
+     + 35249cb BUG/MINOR: pattern: pattern comparison executed twice
+     + c920096 BUG/MINOR: http: don't clear the SI_FL_DONT_WAKE flag between
+                          requests
+     + b800623 BUG/MEDIUM: stats: fix HTTP/1.0 breakage introduced in previous
+                           patch
+     + 61f7f0a BUG/MINOR: stream-int: do not clear the owner upon unregister
+     + 983eb31 BUG/MINOR: channel: CHN_INFINITE_FORWARD must be unsigned
+     + a3ae932 BUG/MEDIUM: stats: the web interface must check the tracked
+                           servers before enabling
+     + e24d963 BUG/MEDIUM: checks: unchecked servers could not be enabled
+                           anymore
+     + 7257550 BUG/MINOR: http: always disable compression on HTTP/1.0
+     + 9f708ab BUG/MINOR: checks: successful check completion must not
+                          re-enable MAINT servers
+     + ff605db BUG/MEDIUM: backend: do not re-initialize the connection's
+                           context upon reuse
+     + ea90063 BUG/MEDIUM: stream-int: fix the keep-alive idle connection
+                           handler
+  * Update debian/copyright to reflect the license of ebtree/
+    (closes: #732614)
+  * Synchronize debian/copyright with source
+  * Add Documentation field to the systemd unit file
+
+ -- Apollon Oikonomopoulos <apoikos@debian.org>  Mon, 20 Jan 2014 10:07:34 +0200
+
+haproxy (1.5~dev21-1) experimental; urgency=low
+
+  [ Prach Pongpanich ]
+  * Bump Standards-Version to 3.9.5
+
+  [ Thomas Bechtold ]
+  * debian/control: Add haproxy-dbg binary package for debug symbols.
+
+  [ Apollon Oikonomopoulos ]
+  * New upstream development version.
+  * Require syslog to be operational before starting. Closes: #726323.
+
+ -- Vincent Bernat <bernat@debian.org>  Tue, 17 Dec 2013 01:38:04 +0700
+
+haproxy (1.5~dev19-2) experimental; urgency=low
+
+  [ Vincent Bernat ]
+  * Really enable systemd support by using dh-systemd helper.
+  * Don't use -L/usr/lib and rely on default search path. Closes: #722777.
+  
+  [ Apollon Oikonomopoulos ]
+  * Ship halog.
+
+ -- Vincent Bernat <bernat@debian.org>  Thu, 12 Sep 2013 21:58:05 +0200
+
+haproxy (1.5~dev19-1) experimental; urgency=high
+
+  [ Vincent Bernat ]
+  * New upstream version.
+     + CVE-2013-2175: fix a possible crash when using negative header
+       occurrences.
+     + Drop 0002-Fix-typo-in-src-haproxy.patch: applied upstream.
+  * Enable gzip compression feature.
+
+  [ Prach Pongpanich ]
+  * Drop bashism patch. It seems useless to maintain a patch to convert
+    example scripts from /bin/bash to /bin/sh.
+  * Fix reload/restart action of init script (LP: #1187469)
+  
+ -- Vincent Bernat <bernat@debian.org>  Mon, 17 Jun 2013 22:03:58 +0200
+
+haproxy (1.5~dev18-1) experimental; urgency=low
+
+  [ Apollon Oikonomopoulos ]
+  * New upstream development version
+
+  [ Vincent Bernat ]
+  * Add support for systemd. Currently, /etc/default/haproxy is not used
+    when using systemd.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 26 May 2013 12:33:00 +0200
+
+haproxy (1.4.25-1) unstable; urgency=medium
+
+  [ Prach Pongpanich ]
+  * New upstream version.
+  * Update watch file to use the source page.
+  * Bump Standards-Version to 3.9.5.
+
+  [ Thomas Bechtold ]
+  * debian/control: Add haproxy-dbg binary package for debug symbols.
+
+  [ Apollon Oikonomopoulos ]
+  * Require syslog to be operational before starting. Closes: #726323.
+  * Document how to bind non-local IPv6 addresses.
+  * Add a reference to configuration.txt.gz to the manpage.
+  * debian/copyright: synchronize with source.
+
+ -- Prach Pongpanich <prachpub@gmail.com>  Fri, 28 Mar 2014 09:35:09 +0700
+
+haproxy (1.4.24-2) unstable; urgency=low
+
+  [ Apollon Oikonomopoulos ]
+  * Ship contrib/halog as /usr/bin/halog.
+  
+  [ Vincent Bernat ]
+  * Don't use -L/usr/lib and rely on default search path. Closes: #722777.
+
+ -- Vincent Bernat <bernat@debian.org>  Sun, 15 Sep 2013 14:36:27 +0200
+
+haproxy (1.4.24-1) unstable; urgency=high
+
+  [ Vincent Bernat ]
+  * New upstream version.
+     + CVE-2013-2175: fix a possible crash when using negative header
+       occurrences.
+
+  [ Prach Pongpanich ]
+  * Drop bashism patch. It seems useless to maintain a patch to convert
+    example scripts from /bin/bash to /bin/sh.
+  * Fix reload/restart action of init script (LP: #1187469).
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 17 Jun 2013 21:56:26 +0200
+
+haproxy (1.4.23-1) unstable; urgency=low
+
+  [ Apollon Oikonomopoulos ]
+  * New upstream version (Closes: #643650, #678953)
+     + This fixes CVE-2012-2942 (Closes: #674447)
+     + This fixes CVE-2013-1912 (Closes: #704611)
+  * Ship vim addon as vim-haproxy (Closes: #702893)
+  * Check for the configuration file after sourcing /etc/default/haproxy
+    (Closes: #641762)
+  * Use /dev/log for logging by default (Closes: #649085)
+
+  [ Vincent Bernat ]
+  * debian/control:
+     + add Vcs-* fields
+     + switch maintenance to Debian HAProxy team. (Closes: #706890)
+     + drop dependency to quilt: 3.0 (quilt) format is in use.
+  * debian/rules:
+     + don't explicitly call dh_installchangelog.
+     + use dh_installdirs to install directories.
+     + use dh_install to install error and configuration files.
+     + switch to `linux2628` Makefile target for Linux.
+  * debian/postrm:
+     + remove haproxy user and group on purge.
+  * Ship a more minimal haproxy.cfg file: no `listen` blocks but `global`
+    and `defaults` block with appropriate configuration to use chroot and
+    logging in the expected way.
+
+  [ Prach Pongpanich ]
+  * debian/copyright:
+     + add missing copyright holders
+     + update years of copyright
+  * debian/rules:
+     + build with -Wl,--as-needed to get rid of unnecessary depends
+  * Remove useless files in debian/haproxy.{docs,examples}
+  * Update debian/watch file, thanks to Bart Martens
+
+ -- Vincent Bernat <bernat@debian.org>  Mon, 06 May 2013 20:02:14 +0200
+
+haproxy (1.4.15-1) unstable; urgency=low
+
+  * New upstream release with critical bug fix (Closes: #631351)
+
+ -- Christo Buschek <crito@30loops.net>  Thu, 14 Jul 2011 18:17:05 +0200
+
+haproxy (1.4.13-1) unstable; urgency=low
+
+  * New maintainer upload (Closes: #615246)
+  * New upstream release
+  * Standards-version goes 3.9.1 (no change)
+  * Added patch bashism (Closes: #581109)
+  * Added a README.source file.
+
+ -- Christo Buschek <crito@30loops.net>  Thu, 11 Mar 2011 12:41:59 +0000
+
+haproxy (1.4.8-1) unstable; urgency=low
+
+  * New upstream release.
+
+ -- Arnaud Cornet <acornet@debian.org>  Fri, 18 Jun 2010 00:42:53 +0100
+
+haproxy (1.4.4-1) unstable; urgency=low
+
+  * New upstream release
+  * Add splice and tproxy support
+  * Add regparm optimization on i386
+  * Switch to dpkg-source 3.0 (quilt) format
+
+ -- Arnaud Cornet <acornet@debian.org>  Thu, 15 Apr 2010 20:00:34 +0100
+
+haproxy (1.4.2-1) unstable; urgency=low
+
+  * New upstream release
+  * Remove debian/patches/haproxy.1-hyphen.patch gone upstream
+  * Tighten quilt build dep (Closes: #567087)
+  * standards-version goes 3.8.4 (no change)
+  * Add $remote_fs to init.d script required start and stop
+
+ -- Arnaud Cornet <acornet@debian.org>  Sat, 27 Mar 2010 15:19:48 +0000
+
+haproxy (1.3.22-1) unstable; urgency=low
+
+  * New upstream bugfix release
+
+ -- Arnaud Cornet <acornet@debian.org>  Mon, 19 Oct 2009 22:31:45 +0100
+
+haproxy (1.3.21-1) unstable; urgency=low
+
+  [ Michael Shuler ]
+  * New Upstream Version (Closes: #538992)
+  * Added override for example shell scripts in docs (Closes: #530096)
+  * Added upstream changelog to docs
+  * Added debian/watch
+  * Updated debian/copyright format
+  * Added haproxy.1-hyphen.patch, to fix hyphen in man page
+  * Upgrade Standards-Version to 3.8.3 (no change needed)
+  * Upgrade debian/compat to 7 (no change needed)
+
+  [ Arnaud Cornet ]
+  * New upstream version.
+  * Merge Michael's work, few changelog fixes
+  * Add debian/README.source to point to quilt doc
+  * Depend on debhelper >= 7.0.50~ and use overrides in debian/rules
+
+ -- Arnaud Cornet <acornet@debian.org>  Sun, 18 Oct 2009 14:01:29 +0200
+
+haproxy (1.3.18-1) unstable; urgency=low
+
+  * New Upstream Version (Closes: #534583).
+  * Add contrib directory in docs
+
+ -- Arnaud Cornet <acornet@debian.org>  Fri, 26 Jun 2009 00:11:01 +0200
+
+haproxy (1.3.15.7-2) unstable; urgency=low
+
+  * Fix build without debian/patches directory (Closes: #515682) using
+    /usr/share/quilt/quilt.make.
+
+ -- Arnaud Cornet <acornet@debian.org>  Tue, 17 Feb 2009 08:55:12 +0100
+
+haproxy (1.3.15.7-1) unstable; urgency=low
+
+  * New Upstream Version.
+  * Remove upstream patches:
+  -use_backend-consider-unless.patch
+  -segfault-url_param+check_post.patch
+  -server-timeout.patch
+  -closed-fd-remove.patch
+  -connection-slot-during-retry.patch
+  -srv_dynamic_maxconn.patch
+  -do-not-pause-backends-on-reload.patch
+  -acl-in-default.patch
+  -cookie-capture-check.patch
+  -dead-servers-queue.patch
+
+ -- Arnaud Cornet <acornet@debian.org>  Mon, 16 Feb 2009 11:20:21 +0100
+
+haproxy (1.3.15.2-2~lenny1) testing-proposed-updates; urgency=low
+
+  * Rebuild for lenny to circumvent pcre3 shlibs bump.
+
+ -- Arnaud Cornet <acornet@debian.org>  Wed, 14 Jan 2009 11:28:36 +0100
+
+haproxy (1.3.15.2-2) unstable; urgency=low
+
+  * Add stable branch bug fixes from upstream (Closes: #510185).
+    - use_backend-consider-unless.patch: consider "unless" in use_backend
+    - segfault-url_param+check_post.patch: fix segfault with url_param +
+    check_post
+    - server-timeout.patch: consider server timeout in all circumstances
+    - closed-fd-remove.patch: drop info about closed file descriptors
+    - connection-slot-during-retry.patch: do not release the connection slot
+    during a retry
+    - srv_dynamic_maxconn.patch: dynamic connection throttling api fix
+    - do-not-pause-backends-on-reload.patch: make reload reliable
+    - acl-in-default.patch: allow acl-related keywords in defaults sections
+    - cookie-capture-check.patch: cookie capture is declared in the frontend
+    but checked on the backend
+    - dead-servers-queue.patch: make dead servers not suck pending connections
+  * Add quilt build-dependency. Use quilt in debian/rules to apply
+    patches.
+
+ -- Arnaud Cornet <acornet@debian.org>  Wed, 31 Dec 2008 08:50:21 +0100
+
+haproxy (1.3.15.2-1) unstable; urgency=low
+
+  * New Upstream Version (Closes: #497186).
+
+ -- Arnaud Cornet <acornet@debian.org>  Sat, 30 Aug 2008 18:06:31 +0200
+
+haproxy (1.3.15.1-1) unstable; urgency=low
+
+  * New Upstream Version
+  * Upgrade standards version to 3.8.0 (no change needed).
+  * Build with TARGET=linux26 on linux, TARGET=generic on other systems.
+
+ -- Arnaud Cornet <acornet@debian.org>  Fri, 20 Jun 2008 00:38:50 +0200
+
+haproxy (1.3.14.5-1) unstable; urgency=low
+
+  * New Upstream Version (Closes: #484221)
+  * Use debhelper 7, drop CDBS.
+
+ -- Arnaud Cornet <acornet@debian.org>  Wed, 04 Jun 2008 19:21:56 +0200
+
+haproxy (1.3.14.3-1) unstable; urgency=low
+
+  * New Upstream Version
+  * Add status argument support to init-script to conform to LSB.
+  * Cleanup pidfile after stop in init script. Init script return code fixups.
+
+ -- Arnaud Cornet <acornet@debian.org>  Sun, 09 Mar 2008 21:30:29 +0100
+
+haproxy (1.3.14.2-3) unstable; urgency=low
+
+  * Add init script support for nbproc > 1 in configuration. That is,
+    multiple haproxy processes.
+  * Use 'option redispatch' instead of redispatch in debian default
+    config.
+
+ -- Arnaud Cornet <acornet@debian.org>  Sun, 03 Feb 2008 18:22:28 +0100
+
+haproxy (1.3.14.2-2) unstable; urgency=low
+
+  * Fix init scripts's reload function to use -sf instead of -st (to wait for
+    active session to finish cleanly). Also support dash. Thanks to
+    Jean-Baptiste Quenot for noticing.
+
+ -- Arnaud Cornet <acornet@debian.org>  Thu, 24 Jan 2008 23:47:26 +0100
+
+haproxy (1.3.14.2-1) unstable; urgency=low
+
+  * New Upstream Version
+  * Simplify DEB_MAKE_INVOKE, as upstream now supports us overriding
+    CFLAGS.
+  * Move haproxy to usr/sbin.
+
+ -- Arnaud Cornet <acornet@debian.org>  Mon, 21 Jan 2008 22:42:51 +0100
+
+haproxy (1.3.14.1-1) unstable; urgency=low
+
+  * New upstream release.
+  * Drop dfsg list and hash code rewrite (merged upstream).
+  * Add a HAPROXY variable in init script.
+  * Drop makefile patch, fix debian/rules accordingly. Drop build-dependency
+    on quilt.
+  * Manpage now upstream. Ship upstream's and drop ours.
+
+ -- Arnaud Cornet <acornet@debian.org>  Tue, 01 Jan 2008 22:50:09 +0100
+
+haproxy (1.3.12.dfsg2-1) unstable; urgency=low
+
+  * New upstream bugfix release.
+  * Use new Homepage tag.
+  * Bump standards-version (no change needed).
+  * Add build-depend on quilt and add patch to allow proper CFLAGS passing to
+    make.
+
+ -- Arnaud Cornet <acornet@debian.org>  Tue, 25 Dec 2007 21:52:59 +0100
+
+haproxy (1.3.12.dfsg-1) unstable; urgency=low
+
+  * Initial release (Closes: #416397).
+  * The DFSG removes files with GPL-incompatible license and adds a
+    re-implementation by me.
+
+ -- Arnaud Cornet <acornet@debian.org>  Fri, 17 Aug 2007 09:33:41 +0200
diff --git a/clean b/clean
new file mode 100644 (file)
index 0000000..8d9fe01
--- /dev/null
+++ b/clean
@@ -0,0 +1,3 @@
+doc/configuration.html
+doc/intro.html
+doc/management.html
diff --git a/compat b/compat
new file mode 100644 (file)
index 0000000..f599e28
--- /dev/null
+++ b/compat
@@ -0,0 +1 @@
+10
diff --git a/control b/control
new file mode 100644 (file)
index 0000000..cc08457
--- /dev/null
+++ b/control
@@ -0,0 +1,57 @@
+Source: haproxy
+Section: net
+Priority: optional
+Maintainer: Debian HAProxy Maintainers <haproxy@tracker.debian.org>
+Uploaders: Apollon Oikonomopoulos <apoikos@debian.org>,
+           Prach Pongpanich <prach@debian.org>,
+           Vincent Bernat <bernat@debian.org>
+Standards-Version: 4.4.0
+Build-Depends: debhelper (>= 10),
+               libpcre2-dev,
+               libssl-dev,
+               liblua5.3-dev,
+               libsystemd-dev [linux-any],
+               python3-sphinx,
+               zlib1g-dev
+Build-Depends-Indep: python3, python3-mako
+Homepage: http://www.haproxy.org/
+Vcs-Git: https://salsa.debian.org/haproxy-team/haproxy.git
+Vcs-Browser: https://salsa.debian.org/haproxy-team/haproxy
+
+Package: haproxy
+Architecture: any
+Depends: ${shlibs:Depends}, ${misc:Depends}, adduser, lsb-base (>= 3.0-6)
+Pre-Depends: dpkg (>= 1.17.14)
+Suggests: vim-haproxy, haproxy-doc
+Description: fast and reliable load balancing reverse proxy
+ HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high
+ availability environments. It features connection persistence through HTTP
+ cookies, load balancing, header addition, modification, deletion both ways. It
+ has request blocking capabilities and provides interface to display server
+ status.
+
+Package: haproxy-doc
+Section: doc
+Architecture: all
+Depends: ${misc:Depends}, libjs-bootstrap (<< 4), libjs-jquery,
+ ${sphinxdoc:Depends}
+Pre-Depends: dpkg (>= 1.17.14)
+Description: fast and reliable load balancing reverse proxy (HTML documentation)
+ HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high
+ availability environments. It features connection persistence through HTTP
+ cookies, load balancing, header addition, modification, deletion both ways. It
+ has request blocking capabilities and provides interface to display server
+ status.
+ .
+ This package contains the HTML documentation for haproxy.
+
+Package: vim-haproxy
+Architecture: all
+Depends: ${misc:Depends}
+Recommends: vim-addon-manager
+Description: syntax highlighting for HAProxy configuration files
+ The vim-haproxy package provides filetype detection and syntax highlighting
+ for HAProxy configuration files.
+ .
+ As per the Debian vim policy, installed addons are not activated
+ automatically, but the "vim-addon-manager" tool can be used for this purpose.
diff --git a/copyright b/copyright
new file mode 100644 (file)
index 0000000..1b9cdf3
--- /dev/null
+++ b/copyright
@@ -0,0 +1,261 @@
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: haproxy
+Upstream-Contact: Willy Tarreau <w@1wt.eu>
+Source: http://www.haproxy.org/
+
+Files: *
+Copyright: Copyright 2000-2015  Willy Tarreau <w@1wt.eu>.
+License: GPL-2+
+Comment:
+ An OpenSSL exception is present in the LICENSE file: "This program is
+ released under the GPL with the additional exemption that compiling,
+ linking, and/or using OpenSSL is allowed."
+
+Files: ebtree/*
+       include/*
+       contrib/halog/fgets2.c
+Copyright: Copyright 2000-2013 Willy Tarreau - w@1wt.eu
+License: LGPL-2.1
+
+Files: include/proto/auth.h
+       include/types/checks.h
+       include/types/auth.h
+       src/auth.c
+Copyright: Copyright 2008-2010 Krzysztof Piotr Oledzki <ole@ans.pl>
+License: GPL-2+
+
+Files: include/import/lru.h
+       src/lru.c
+Copyright: Copyright (C) 2015 Willy Tarreau <w@1wt.eu>
+License: Expat
+
+Files: include/import/xxhash.h
+       src/xxhash.c
+Copyright: Copyright (C) 2012-2014, Yann Collet.
+License: BSD-2-clause
+
+Files: include/proto/shctx.h
+       src/shctx.c
+Copyright: Copyright (C) 2011-2012 EXCELIANCE
+License: GPL-2+
+
+Files: include/proto/compression.h
+       include/types/compression.h
+Copyright: Copyright 2012 (C) Exceliance, David Du Colombier <dducolombier@exceliance.fr>
+                                          William Lallemand <wlallemand@exceliance.fr>
+License: LGPL-2.1
+
+Files: include/proto/peers.h
+       include/proto/ssl_sock.h
+       include/types/peers.h
+       include/types/ssl_sock.h
+Copyright: Copyright (C) 2009-2012 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
+License: LGPL-2.1
+
+Files: include/types/dns.h
+Copyright: Copyright (C) 2014 Baptiste Assmann <bedis9@gmail.com>
+License: LGPL-2.1
+
+Files: src/dns.c
+Copyright: Copyright (C) 2014 Baptiste Assmann <bedis9@gmail.com>
+License: GPL-2+
+
+Files: include/types/mailers.h
+       src/mailers.c
+Copyright: Copyright 2015 Horms Solutions Ltd., Simon Horman <horms@verge.net.au>
+           Copyright 2010 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
+License: LGPL-2.1
+
+Files: include/proto/sample.h
+       include/proto/stick_table.h
+       include/types/sample.h
+       include/types/stick_table.h
+Copyright: Copyright (C) 2009-2012 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
+           Copyright (C) 2010-2013 Willy Tarreau <w@1wt.eu>
+License: LGPL-2.1
+
+Files: include/types/counters.h
+Copyright: Copyright 2008-2009 Krzysztof Piotr Oledzki <ole@ans.pl>
+           Copyright 2011 Willy Tarreau <w@1wt.eu>
+License: LGPL-2.1
+
+Files: include/common/base64.h
+       include/common/uri_auth.h
+       include/proto/signal.h
+       include/types/signal.h
+Copyright: Copyright 2000-2013 Willy Tarreau <w@1wt.eu>
+License: GPL-2+
+
+Files: include/common/rbtree.h
+Copyright: (C) 1999 Andrea Arcangeli <andrea@suse.de>
+License: GPL-2+
+
+Files: src/base64.c
+       src/checks.c
+       src/stats.c
+       src/server.c
+Copyright: Copyright 2000-2012 Willy Tarreau <w@1wt.eu>
+           Copyright 2007-2010 Krzysztof Piotr Oledzki <ole@ans.pl>
+License: GPL-2+
+
+Files: src/compression.c
+Copyright: Copyright 2012 (C) Exceliance, David Du Colombier <dducolombier@exceliance.fr>
+                                          William Lallemand <wlallemand@exceliance.fr>
+License: GPL-2+
+
+Files: src/rbtree.c
+Copyright: (C) 1999 Andrea Arcangeli <andrea@suse.de>
+           (C) 2002 David Woodhouse <dwmw2@infradead.org>
+License: GPL-2+
+
+Files: src/sample.c
+       src/stick_table.c
+Copyright: Copyright 2009-2010 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
+           Copyright (C) 2010-2012 Willy Tarreau <w@1wt.eu>
+License: GPL-2+
+
+Files: src/peers.c
+       src/ssl_sock.c
+Copyright: Copyright (C) 2010-2012 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
+License: GPL-2+
+
+Files: contrib/netsnmp-perl/haproxy.pl
+       contrib/base64/base64rev-gen.c
+Copyright: Copyright 2007-2010 Krzysztof Piotr Oledzki <ole@ans.pl>
+License: GPL-2+
+
+Files: examples/stats_haproxy.sh
+Copyright: Copyright 2007 Julien Antony and Matthieu Huguet
+License: GPL-2+
+
+Files: examples/check
+Copyright: 2006-2007 (C) Fabrice Dulaunoy <fabrice@dulaunoy.com>
+License: GPL-2+
+
+Files: tests/test_pools.c
+Copyright: Copyright 2007 Aleksandar Lazic <al-haproxy@none.at>
+License: GPL-2+
+
+Files: debian/*
+Copyright: Copyright (C) 2007-2011, Arnaud Cornet <acornet@debian.org>
+           Copyright (C) 2011, Christo Buschek <crito@30loops.net>
+           Copyright (C) 2013, Prach Pongpanich <prachpub@gmail.com>
+           Copyright (C) 2013-2016, Apollon Oikonomopoulos <apoikos@debian.org>
+           Copyright (C) 2013-2016, Vincent Bernat <bernat@debian.org>
+License: GPL-2
+
+Files: debian/dconv/*
+Copyright: Copyright (C) 2012 Cyril Bonté
+License: Apache-2.0
+
+Files: debian/dconv/js/typeahead.bundle.js
+Copyright: Copyright 2013-2015 Twitter, Inc. and other contributors
+License: Expat
+License: GPL-2+
+ This program is free software; you can redistribute it
+ and/or modify it under the terms of the GNU General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later
+ version.
+ .
+ This program is distributed in the hope that it will be
+ useful, but WITHOUT ANY WARRANTY; without even the implied
+ warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the GNU General Public License for more
+ details.
+ .
+ You should have received a copy of the GNU General Public
+ License along with this package; if not, write to the Free
+ Software Foundation, Inc., 51 Franklin St, Fifth Floor,
+ Boston, MA  02110-1301 USA
+ .
+ On Debian systems, the full text of the GNU General Public
+ License version 2 can be found in the file
+ `/usr/share/common-licenses/GPL-2'.
+
+License: LGPL-2.1
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+ .
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ Lesser General Public License for more details.
+ .
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
+ .
+ On Debian systems, the complete text of the GNU Lesser General Public License,
+ version 2.1, can be found in /usr/share/common-licenses/LGPL-2.1.
+
+License: GPL-2
+ This program is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License version 2 as
+ published by the Free Software Foundation.
+ .
+ On Debian systems, the complete text of the GNU General Public License, version
+ 2, can be found in /usr/share/common-licenses/GPL-2.
+
+License: Apache-2.0
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ .
+     http://www.apache.org/licenses/LICENSE-2.0
+ .
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ .
+ On Debian systems, the full text of the Apache License version 2.0 can be
+ found in the file `/usr/share/common-licenses/Apache-2.0'.
+
+License: Expat
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+ .
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+ .
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+License: BSD-2-clause
+ Redistribution and use in source and binary forms, with or without
+ modification, are permitted provided that the following conditions are
+ met:
+ .
+     * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+     * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following disclaimer
+ in the documentation and/or other materials provided with the
+ distribution.
+ .
+ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/dconv/LICENSE b/dconv/LICENSE
new file mode 100644 (file)
index 0000000..d645695
--- /dev/null
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/dconv/NOTICE b/dconv/NOTICE
new file mode 100644 (file)
index 0000000..c9575a7
--- /dev/null
@@ -0,0 +1,13 @@
+Copyright 2012 Cyril Bonté
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/dconv/README.md b/dconv/README.md
new file mode 100644 (file)
index 0000000..4ca89b2
--- /dev/null
@@ -0,0 +1,21 @@
+# HAProxy Documentation Converter
+
+Made to convert the HAProxy documentation into HTML.
+
+More than HTML, the main goal is to provide easy navigation.
+
+## Documentations
+
+A bot periodically fetches last commits for HAProxy 1.4 and 1.5 to produce up-to-date documentations.
+
+Converted documentations are then stored online:
+- HAProxy 1.4 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.4.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.4.html)
+- HAProxy 1.5 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.5.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.5.html)
+- HAProxy 1.6 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.6.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.6.html)
+
+
+## Contribute
+
+The project now lives by itself, as it is sufficiently usable. But I'm sure we can do even better.
+Feel free to report feature requests or to provide patches !
+
diff --git a/dconv/css/check.png b/dconv/css/check.png
new file mode 100644 (file)
index 0000000..a7fab32
Binary files /dev/null and b/dconv/css/check.png differ
diff --git a/dconv/css/cross.png b/dconv/css/cross.png
new file mode 100644 (file)
index 0000000..24f5064
Binary files /dev/null and b/dconv/css/cross.png differ
diff --git a/dconv/css/page.css b/dconv/css/page.css
new file mode 100644 (file)
index 0000000..b48fdd2
--- /dev/null
@@ -0,0 +1,223 @@
+/* Global Styles */
+
+body {
+       margin-top: 50px;
+       background: #eee;
+}
+
+a.anchor {
+       display: block; position: relative; top: -50px; visibility: hidden;
+}
+
+/* ------------------------------- */
+
+/* Wrappers */
+
+/* ------------------------------- */
+
+#wrapper {
+    width: 100%;
+}
+
+#page-wrapper {
+    padding: 0 15px 50px;
+    width: 740px;
+    background-color: #fff;
+    margin-left: 250px;
+}
+
+#sidebar {
+       position: fixed;
+       width: 250px;
+       top: 50px;
+       bottom: 0;
+       padding: 15px;
+       background: #f5f5f5;
+       border-right: 1px solid #ccc;
+}
+
+
+/* ------------------------------- */
+
+/* Twitter typeahead.js */
+
+/* ------------------------------- */
+
+.twitter-typeahead {
+    width: 100%;
+}
+.typeahead,
+.tt-query,
+.tt-hint {
+    width: 100%;
+  padding: 8px 12px;
+  border: 2px solid #ccc;
+  -webkit-border-radius: 8px;
+     -moz-border-radius: 8px;
+          border-radius: 8px;
+  outline: none;
+}
+
+.typeahead {
+  background-color: #fff;
+}
+
+.typeahead:focus {
+  border: 2px solid #0097cf;
+}
+
+.tt-query {
+  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
+     -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
+          box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
+}
+
+.tt-hint {
+  color: #999;
+}
+
+.tt-menu {
+  width: 100%;
+  margin-top: 4px;
+  padding: 8px 0;
+  background-color: #fff;
+  border: 1px solid #ccc;
+  border: 1px solid rgba(0, 0, 0, 0.2);
+  -webkit-border-radius: 8px;
+     -moz-border-radius: 8px;
+          border-radius: 8px;
+  -webkit-box-shadow: 0 5px 10px rgba(0,0,0,.2);
+     -moz-box-shadow: 0 5px 10px rgba(0,0,0,.2);
+          box-shadow: 0 5px 10px rgba(0,0,0,.2);
+}
+
+.tt-suggestion {
+  padding: 3px 8px;
+  line-height: 24px;
+}
+
+.tt-suggestion:hover {
+  cursor: pointer;
+  color: #fff;
+  background-color: #0097cf;
+}
+
+.tt-suggestion.tt-cursor {
+  color: #fff;
+  background-color: #0097cf;
+
+}
+
+.tt-suggestion p {
+  margin: 0;
+}
+
+#searchKeyword {
+    width: 100%;
+    margin: 0;
+}
+
+#searchKeyword .tt-menu {
+  max-height: 300px;
+  overflow-y: auto;
+}
+
+/* ------------------------------- */
+
+/* Misc */
+
+/* ------------------------------- */
+
+.well-small ul {
+       padding: 0px;
+}
+.table th,
+.table td.pagination-centered {
+       text-align: center;
+}
+
+pre {
+       overflow: visible; /* Workaround for dropdown menus */
+}
+
+pre.text {
+        padding: 0;
+       font-size: 13px;
+       color: #000;
+       background: transparent;
+        border: none;
+        margin-bottom: 18px;
+}
+pre.arguments {
+        font-size: 13px;
+        color: #000;
+        background: transparent;
+}
+
+.comment {
+       color: #888;
+}
+small, .small {
+       color: #888;
+}
+.level1 {
+       font-size: 125%;
+}
+.sublevels {
+       border-left: 1px solid #ccc;
+       padding-left: 10px;
+}
+.tab {
+               padding-left: 20px;
+}
+.keyword  {
+    font-family: Menlo, Monaco, "Courier New", monospace;
+    white-space: pre;
+    background: #eee;
+    border-top: 1px solid #fff;
+    border-bottom: 1px solid #ccc;
+}
+
+.label-see-also {
+       background-color: #999;
+}
+.label-disabled {
+       background-color: #ccc;
+}
+h5 {
+       text-decoration: underline;
+}
+
+.example-desc {
+       border-bottom: 1px solid #ccc;
+       margin-bottom: 18px;
+}
+.noheight {
+       min-height: 0 !important;
+}
+.separator {
+    margin-bottom: 18px;
+}
+
+div {
+       word-wrap: break-word;
+}
+
+html, body {
+       width: 100%;
+	min-height: 100%;
+}
+
+.dropdown-menu > li {
+       white-space: nowrap;
+}
+/* TEMPORARILY HACKS WHILE PRE TAGS ARE USED
+-------------------------------------------------- */
+
+h5,
+.unpre,
+.example-desc,
+.dropdown-menu {
+       font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
+        white-space: normal;
+}
diff --git a/dconv/haproxy-dconv.py b/dconv/haproxy-dconv.py
new file mode 100755 (executable)
index 0000000..ec800cf
--- /dev/null
@@ -0,0 +1,534 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright 2012 Cyril Bonté
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+TODO : ability to split chapters into several files
+TODO : manage keyword locality (server/proxy/global ; ex : maxconn)
+TODO : Remove global variables where possible
+'''
+import os
+import subprocess
+import sys
+import html
+import re
+import time
+import datetime
+
+from optparse import OptionParser
+
+from mako.template import Template
+from mako.lookup import TemplateLookup
+from mako.exceptions import TopLevelLookupException
+
+from parser import PContext
+from parser import remove_indent
+from parser import *
+
+from urllib.parse import quote
+
+VERSION = ""
+HAPROXY_GIT_VERSION = False
+
+def main():
+    global VERSION, HAPROXY_GIT_VERSION
+
+    usage="Usage: %prog --infile <infile> --outfile <outfile>"
+
+    optparser = OptionParser(description='Generate HTML Document from HAProxy configuation.txt',
+                          version=VERSION,
+                          usage=usage)
+    optparser.add_option('--infile', '-i', help='Input file mostly the configuration.txt')
+    optparser.add_option('--outfile','-o', help='Output file')
+    optparser.add_option('--base','-b', default = '', help='Base directory for relative links')
+    (option, args) = optparser.parse_args()
+
+    if not (option.infile  and option.outfile) or len(args) > 0:
+        optparser.print_help()
+        exit(1)
+
+    option.infile = os.path.abspath(option.infile)
+    option.outfile = os.path.abspath(option.outfile)
+
+    os.chdir(os.path.dirname(__file__))
+
+    VERSION = get_git_version()
+    if not VERSION:
+        sys.exit(1)
+
+    HAPROXY_GIT_VERSION = get_haproxy_git_version(os.path.dirname(option.infile))
+
+    convert(option.infile, option.outfile, option.base)
+
+
+# Temporarily determine the version from git to follow which commit generated
+# the documentation
+def get_git_version():
+    if not os.path.isdir(".git"):
+        print("This does not appear to be a Git repository.", file=sys.stderr)
+        return
+    try:
+        p = subprocess.Popen(["git", "describe", "--tags", "--match", "v*"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    except EnvironmentError:
+        print("Unable to run git", file=sys.stderr)
+        return
+    version = p.communicate()[0]
+    if p.returncode != 0:
+        print("Unable to run git", file=sys.stderr)
+        return
+
+    if len(version) < 2:
+        return
+
+    version = version[1:].strip()
+    version = re.sub(r'-g.*', '', version)
+    return version
+
+def get_haproxy_git_version(path):
+    try:
+        p = subprocess.Popen(["git", "describe", "--tags", "--match", "v*"], cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    except EnvironmentError:
+        return False
+    version = p.communicate()[0]
+
+    if p.returncode != 0:
+        return False
+
+    if len(version) < 2:
+        return False
+
+    version = version[1:].strip()
+    version = re.sub(r'-g.*', '', version)
+    return version
+
+def getTitleDetails(string):
+    array = string.split(".")
+
+    title    = array.pop().strip()
+    chapter  = ".".join(array)
+    level    = max(1, len(array))
+    if array:
+        toplevel = array[0]
+    else:
+        toplevel = False
+
+    return {
+            "title"   : title,
+            "chapter" : chapter,
+            "level"   : level,
+            "toplevel": toplevel
+    }
+
+# Parse the whole document to insert links on keywords
+def createLinks():
+    global document, keywords, keywordsCount, keyword_conflicts, chapters
+
+    print("Generating keywords links...", file=sys.stderr)
+
+    delimiters = [
+        dict(start='&quot;', end='&quot;', multi=True ),
+        dict(start='- '    , end='\n'    , multi=False),
+    ]
+
+    for keyword in keywords:
+        keywordsCount[keyword] = 0
+        for delimiter in delimiters:
+            keywordsCount[keyword] += document.count(delimiter['start'] + keyword + delimiter['end'])
+        if (keyword in keyword_conflicts) and (not keywordsCount[keyword]):
+            # The keyword is never used, we can remove it from the conflicts list
+            del keyword_conflicts[keyword]
+
+        if keyword in keyword_conflicts:
+            chapter_list = ""
+            for chapter in keyword_conflicts[keyword]:
+                chapter_list += '<li><a href="#%s">%s</a></li>' % (quote("%s (%s)" % (keyword, chapters[chapter]['title'])), chapters[chapter]['title'])
+            for delimiter in delimiters:
+                if delimiter['multi']:
+                    document = document.replace(delimiter['start'] + keyword + delimiter['end'],
+                            delimiter['start'] + '<span class="dropdown">' +
+                            '<a class="dropdown-toggle" data-toggle="dropdown" href="#">' +
+                            keyword +
+                            '<span class="caret"></span>' +
+                            '</a>' +
+                            '<ul class="dropdown-menu">' +
+                            '<li class="dropdown-header">This keyword is available in sections :</li>' +
+                            chapter_list +
+                            '</ul>' +
+                            '</span>' + delimiter['end'])
+                else:
+                    document = document.replace(delimiter['start'] + keyword + delimiter['end'], delimiter['start'] + '<a href="#' + quote(keyword) + '">' + keyword + '</a>' + delimiter['end'])
+        else:
+            for delimiter in delimiters:
+                document = document.replace(delimiter['start'] + keyword + delimiter['end'], delimiter['start'] + '<a href="#' + quote(keyword) + '">' + keyword + '</a>' + delimiter['end'])
+        if keyword.startswith("option "):
+            shortKeyword = keyword[len("option "):]
+            keywordsCount[shortKeyword] = 0
+            for delimiter in delimiters:
+                keywordsCount[shortKeyword] += document.count(delimiter['start'] + shortKeyword + delimiter['end'])
+            if (shortKeyword in keyword_conflicts) and (not keywordsCount[shortKeyword]):
+            # The keyword is never used, we can remove it from the conflicts list
+                del keyword_conflicts[shortKeyword]
+            for delimiter in delimiters:
+                document = document.replace(delimiter['start'] + shortKeyword + delimiter['end'], delimiter['start'] + '<a href="#' + quote(keyword) + '">' + shortKeyword + '</a>' + delimiter['end'])
+
+def documentAppend(text, retline = True):
+    global document
+    document += text
+    if retline:
+        document += "\n"
+
+def init_parsers(pctxt):
+    return [
+        underline.Parser(pctxt),
+        arguments.Parser(pctxt),
+        seealso.Parser(pctxt),
+        example.Parser(pctxt),
+        table.Parser(pctxt),
+        underline.Parser(pctxt),
+        keyword.Parser(pctxt),
+    ]
+
+# The parser itself
+def convert(infile, outfile, base=''):
+    global document, keywords, keywordsCount, chapters, keyword_conflicts
+
+    if len(base) > 0 and base[-1] != '/':
+        base += '/'
+
+    hasSummary = False
+
+    data = []
+    fd = open(infile,"r")
+    for line in fd:
+        line = line.replace("\t", " " * 8)
+        line = line.rstrip()
+        data.append(line)
+    fd.close()
+
+    pctxt = PContext(
+        TemplateLookup(
+            directories=[
+                'templates'
+            ]
+        )
+    )
+
+    parsers = init_parsers(pctxt)
+
+    pctxt.context = {
+            'headers':  {},
+            'document': "",
+            'base':     base,
+    }
+
+    sections = []
+    currentSection = {
+            "details": getTitleDetails(""),
+            "content": "",
+    }
+
+    chapters = {}
+
+    keywords = {}
+    keywordsCount = {}
+
+    specialSections = {
+            "default": {
+                    "hasKeywords": True,
+            },
+            "4.1": {
+                    "hasKeywords": True,
+            },
+    }
+
+    pctxt.keywords = keywords
+    pctxt.keywordsCount = keywordsCount
+    pctxt.chapters = chapters
+
+    print("Importing %s..." % infile, file=sys.stderr)
+
+    nblines = len(data)
+    i = j = 0
+    while i < nblines:
+        line = data[i].rstrip()
+        if i < nblines - 1:
+            next = data[i + 1].rstrip()
+        else:
+            next = ""
+        if (line == "Summary" or re.match("^[0-9].*", line)) and (len(next) > 0) and (next[0] == '-') \
+                and ("-" * len(line)).startswith(next):  # Fuzzy underline length detection
+            sections.append(currentSection)
+            currentSection = {
+                "details": getTitleDetails(line),
+                "content": "",
+            }
+            j = 0
+            i += 1 # Skip underline
+            while i + 1 < nblines and not data[i + 1].rstrip():
+                i += 1 # Skip empty lines
+
+        else:
+            if len(line) > 80:
+                print("Line `%i' exceeds 80 columns" % (i + 1), file=sys.stderr)
+
+            currentSection["content"] = currentSection["content"] + line + "\n"
+            j += 1
+            if currentSection["details"]["title"] == "Summary" and line != "":
+                hasSummary = True
+                # Learn chapters from the summary
+                details = getTitleDetails(line)
+                if details["chapter"]:
+                    chapters[details["chapter"]] = details
+        i += 1
+    sections.append(currentSection)
+
+    chapterIndexes = sorted(chapters.keys())
+
+    document = ""
+
+    # Complete the summary
+    for section in sections:
+        details = section["details"]
+        title = details["title"]
+        if title:
+            fulltitle = title
+            if details["chapter"]:
+                #documentAppend("<a name=\"%s\"></a>" % details["chapter"])
+                fulltitle = details["chapter"] + ". " + title
+                if not details["chapter"] in chapters:
+                    print("Adding '%s' to the summary" % details["title"], file=sys.stderr)
+                    chapters[details["chapter"]] = details
+                    chapterIndexes = sorted(chapters.keys())
+
+    for section in sections:
+        details = section["details"]
+        pctxt.details = details
+        level = details["level"]
+        title = details["title"]
+        content = section["content"].rstrip()
+
+        print("Parsing chapter %s..." % title, file=sys.stderr)
+
+        if (title == "Summary") or (title and not hasSummary):
+            summaryTemplate = pctxt.templates.get_template('summary.html')
+            documentAppend(summaryTemplate.render(
+                pctxt = pctxt,
+                chapters = chapters,
+                chapterIndexes = chapterIndexes,
+            ))
+            if title and not hasSummary:
+                hasSummary = True
+            else:
+                continue
+
+        if title:
+            documentAppend('<a class="anchor" id="%s" name="%s"></a>' % (details["chapter"], details["chapter"]))
+            if level == 1:
+                documentAppend("<div class=\"page-header\">", False)
+            documentAppend('<h%d id="chapter-%s" data-target="%s"><small><a class="small" href="#%s">%s.</a></small> %s</h%d>' % (level, details["chapter"], details["chapter"], details["chapter"], details["chapter"], html.escape(title, True), level))
+            if level == 1:
+                documentAppend("</div>", False)
+
+        if content:
+            if False and title:
+                # Display a navigation bar
+                documentAppend('<ul class="well pager">')
+                documentAppend('<li><a href="#top">Top</a></li>', False)
+                index = chapterIndexes.index(details["chapter"])
+                if index > 0:
+                    documentAppend('<li class="previous"><a href="#%s">Previous</a></li>' % chapterIndexes[index - 1], False)
+                if index < len(chapterIndexes) - 1:
+                    documentAppend('<li class="next"><a href="#%s">Next</a></li>' % chapterIndexes[index + 1], False)
+                documentAppend('</ul>', False)
+            content = html.escape(content, True)
+            content = re.sub(r'section ([0-9]+(\.[0-9]+)*)', r'<a href="#\1">section \1</a>', content)
+
+            pctxt.set_content(content)
+
+            if not title:
+                lines = pctxt.get_lines()
+                pctxt.context['headers'] = {
+                    'title':    '',
+                    'subtitle': '',
+                    'version':  '',
+                    'author':   '',
+                    'date':     ''
+                }
+                if re.match("^-+$", pctxt.get_line().strip()):
+                    # Try to analyze the header of the file, assuming it follows
+                    # those rules :
+                    # - it begins with a "separator line" (several '-' chars)
+                    # - then the document title
+                    # - an optional subtitle
+                    # - a new separator line
+                    # - the version
+                    # - the author
+                    # - the date
+                    pctxt.next()
+                    pctxt.context['headers']['title'] = pctxt.get_line().strip()
+                    pctxt.next()
+                    subtitle = ""
+                    while not re.match("^-+$", pctxt.get_line().strip()):
+                        subtitle += " " + pctxt.get_line().strip()
+                        pctxt.next()
+                    pctxt.context['headers']['subtitle'] += subtitle.strip()
+                    if not pctxt.context['headers']['subtitle']:
+                        # No subtitle, try to guess one from the title if it
+                        # starts with the word "HAProxy"
+                        if pctxt.context['headers']['title'].startswith('HAProxy '):
+                            pctxt.context['headers']['subtitle'] = pctxt.context['headers']['title'][8:]
+                            pctxt.context['headers']['title'] = 'HAProxy'
+                    pctxt.next()
+                    pctxt.context['headers']['version'] = pctxt.get_line().strip()
+                    pctxt.next()
+                    pctxt.context['headers']['author'] = pctxt.get_line().strip()
+                    pctxt.next()
+                    pctxt.context['headers']['date'] = pctxt.get_line().strip()
+                    pctxt.next()
+                    if HAPROXY_GIT_VERSION:
+                        pctxt.context['headers']['version'] = 'version ' + HAPROXY_GIT_VERSION
+
+                    # Skip header lines
+                    pctxt.eat_lines()
+                    pctxt.eat_empty_lines()
+
+            documentAppend('<div>', False)
+
+            delay = []
+            while pctxt.has_more_lines():
+                try:
+                    specialSection = specialSections[details["chapter"]]
+                except:
+                    specialSection = specialSections["default"]
+
+                line = pctxt.get_line()
+                if i < nblines - 1:
+                    nextline = pctxt.get_line(1)
+                else:
+                    nextline = ""
+
+                oldline = line
+                pctxt.stop = False
+                for parser in parsers:
+                    line = parser.parse(line)
+                    if pctxt.stop:
+                        break
+                if oldline == line:
+                    # nothing has changed,
+                    # delays the rendering
+                    if delay or line != "":
+                        delay.append(line)
+                    pctxt.next()
+                elif pctxt.stop:
+                    while delay and delay[-1].strip() == "":
+                        del delay[-1]
+                    if delay:
+                        remove_indent(delay)
+                        documentAppend('<pre class="text">%s\n</pre>' % "\n".join(delay), False)
+                    delay = []
+                    documentAppend(line, False)
+                else:
+                    while delay and delay[-1].strip() == "":
+                        del delay[-1]
+                    if delay:
+                        remove_indent(delay)
+                        documentAppend('<pre class="text">%s\n</pre>' % "\n".join(delay), False)
+                    delay = []
+                    documentAppend(line, True)
+                    pctxt.next()
+
+            while delay and delay[-1].strip() == "":
+                del delay[-1]
+            if delay:
+                remove_indent(delay)
+                documentAppend('<pre class="text">%s\n</pre>' % "\n".join(delay), False)
+            delay = []
+            documentAppend('</div>')
+
+    if not hasSummary:
+        summaryTemplate = pctxt.templates.get_template('summary.html')
+        print(chapters)
+        document = summaryTemplate.render(
+            pctxt = pctxt,
+            chapters = chapters,
+            chapterIndexes = chapterIndexes,
+        ) + document
+
+
+    # Log warnings for keywords defined in several chapters
+    keyword_conflicts = {}
+    for keyword in keywords:
+        keyword_chapters = list(keywords[keyword])
+        keyword_chapters.sort()
+        if len(keyword_chapters) > 1:
+            print('Multi section keyword : "%s" in chapters %s' % (keyword, list(keyword_chapters)), file=sys.stderr)
+            keyword_conflicts[keyword] = keyword_chapters
+
+    keywords = list(keywords)
+    keywords.sort()
+
+    createLinks()
+
+    # Add the keywords conflicts to the keywords list to make them available in the search form
+    # And remove the original keyword which is now useless
+    for keyword in keyword_conflicts:
+        sections = keyword_conflicts[keyword]
+        offset = keywords.index(keyword)
+        for section in sections:
+            keywords.insert(offset, "%s (%s)" % (keyword, chapters[section]['title']))
+            offset += 1
+        keywords.remove(keyword)
+
+    print("Exporting to %s..." % outfile, file=sys.stderr)
+
+    template = pctxt.templates.get_template('template.html')
+    try:
+        footerTemplate = pctxt.templates.get_template('footer.html')
+        footer = footerTemplate.render(
+            pctxt = pctxt,
+            headers = pctxt.context['headers'],
+            document = document,
+            chapters = chapters,
+            chapterIndexes = chapterIndexes,
+            keywords = keywords,
+            keywordsCount = keywordsCount,
+            keyword_conflicts = keyword_conflicts,
+            version = VERSION,
+            date = datetime.datetime.now().strftime("%Y/%m/%d"),
+        )
+    except TopLevelLookupException:
+        footer = ""
+
+    fd = open(outfile,'w')
+
+    print(template.render(
+            pctxt = pctxt,
+            headers = pctxt.context['headers'],
+            base = base,
+            document = document,
+            chapters = chapters,
+            chapterIndexes = chapterIndexes,
+            keywords = keywords,
+            keywordsCount = keywordsCount,
+            keyword_conflicts = keyword_conflicts,
+            version = VERSION,
+            date = datetime.datetime.now().strftime("%Y/%m/%d"),
+            footer = footer
+    ), file=fd)
+    fd.close()
+
+if __name__ == '__main__':
+    main()
diff --git a/dconv/img/logo-med.png b/dconv/img/logo-med.png
new file mode 100644 (file)
index 0000000..1be03b2
Binary files /dev/null and b/dconv/img/logo-med.png differ
diff --git a/dconv/js/typeahead.bundle.js b/dconv/js/typeahead.bundle.js
new file mode 100644 (file)
index 0000000..bb0c8ae
--- /dev/null
@@ -0,0 +1,2451 @@
+/*!
+ * typeahead.js 0.11.1
+ * https://github.com/twitter/typeahead.js
+ * Copyright 2013-2015 Twitter, Inc. and other contributors; Licensed MIT
+ */
+
+(function(root, factory) {
+    if (typeof define === "function" && define.amd) {
+        define("bloodhound", [ "jquery" ], function(a0) {
+            return root["Bloodhound"] = factory(a0);
+        });
+    } else if (typeof exports === "object") {
+        module.exports = factory(require("jquery"));
+    } else {
+        root["Bloodhound"] = factory(jQuery);
+    }
+})(this, function($) {
+    var _ = function() {
+        "use strict";
+        return {
+            isMsie: function() {
+                return /(msie|trident)/i.test(navigator.userAgent) ? navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false;
+            },
+            isBlankString: function(str) {
+                return !str || /^\s*$/.test(str);
+            },
+            escapeRegExChars: function(str) {
+                return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
+            },
+            isString: function(obj) {
+                return typeof obj === "string";
+            },
+            isNumber: function(obj) {
+                return typeof obj === "number";
+            },
+            isArray: $.isArray,
+            isFunction: $.isFunction,
+            isObject: $.isPlainObject,
+            isUndefined: function(obj) {
+                return typeof obj === "undefined";
+            },
+            isElement: function(obj) {
+                return !!(obj && obj.nodeType === 1);
+            },
+            isJQuery: function(obj) {
+                return obj instanceof $;
+            },
+            toStr: function toStr(s) {
+                return _.isUndefined(s) || s === null ? "" : s + "";
+            },
+            bind: $.proxy,
+            each: function(collection, cb) {
+                $.each(collection, reverseArgs);
+                function reverseArgs(index, value) {
+                    return cb(value, index);
+                }
+            },
+            map: $.map,
+            filter: $.grep,
+            every: function(obj, test) {
+                var result = true;
+                if (!obj) {
+                    return result;
+                }
+                $.each(obj, function(key, val) {
+                    if (!(result = test.call(null, val, key, obj))) {
+                        return false;
+                    }
+                });
+                return !!result;
+            },
+            some: function(obj, test) {
+                var result = false;
+                if (!obj) {
+                    return result;
+                }
+                $.each(obj, function(key, val) {
+                    if (result = test.call(null, val, key, obj)) {
+                        return false;
+                    }
+                });
+                return !!result;
+            },
+            mixin: $.extend,
+            identity: function(x) {
+                return x;
+            },
+            clone: function(obj) {
+                return $.extend(true, {}, obj);
+            },
+            getIdGenerator: function() {
+                var counter = 0;
+                return function() {
+                    return counter++;
+                };
+            },
+            templatify: function templatify(obj) {
+                return $.isFunction(obj) ? obj : template;
+                function template() {
+                    return String(obj);
+                }
+            },
+            defer: function(fn) {
+                setTimeout(fn, 0);
+            },
+            debounce: function(func, wait, immediate) {
+                var timeout, result;
+                return function() {
+                    var context = this, args = arguments, later, callNow;
+                    later = function() {
+                        timeout = null;
+                        if (!immediate) {
+                            result = func.apply(context, args);
+                        }
+                    };
+                    callNow = immediate && !timeout;
+                    clearTimeout(timeout);
+                    timeout = setTimeout(later, wait);
+                    if (callNow) {
+                        result = func.apply(context, args);
+                    }
+                    return result;
+                };
+            },
+            throttle: function(func, wait) {
+                var context, args, timeout, result, previous, later;
+                previous = 0;
+                later = function() {
+                    previous = new Date();
+                    timeout = null;
+                    result = func.apply(context, args);
+                };
+                return function() {
+                    var now = new Date(), remaining = wait - (now - previous);
+                    context = this;
+                    args = arguments;
+                    if (remaining <= 0) {
+                        clearTimeout(timeout);
+                        timeout = null;
+                        previous = now;
+                        result = func.apply(context, args);
+                    } else if (!timeout) {
+                        timeout = setTimeout(later, remaining);
+                    }
+                    return result;
+                };
+            },
+            stringify: function(val) {
+                return _.isString(val) ? val : JSON.stringify(val);
+            },
+            noop: function() {}
+        };
+    }();
+    var VERSION = "0.11.1";
+    var tokenizers = function() {
+        "use strict";
+        return {
+            nonword: nonword,
+            whitespace: whitespace,
+            obj: {
+                nonword: getObjTokenizer(nonword),
+                whitespace: getObjTokenizer(whitespace)
+            }
+        };
+        function whitespace(str) {
+            str = _.toStr(str);
+            return str ? str.split(/\s+/) : [];
+        }
+        function nonword(str) {
+            str = _.toStr(str);
+            return str ? str.split(/\W+/) : [];
+        }
+        function getObjTokenizer(tokenizer) {
+            return function setKey(keys) {
+                keys = _.isArray(keys) ? keys : [].slice.call(arguments, 0);
+                return function tokenize(o) {
+                    var tokens = [];
+                    _.each(keys, function(k) {
+                        tokens = tokens.concat(tokenizer(_.toStr(o[k])));
+                    });
+                    return tokens;
+                };
+            };
+        }
+    }();
+    var LruCache = function() {
+        "use strict";
+        function LruCache(maxSize) {
+            this.maxSize = _.isNumber(maxSize) ? maxSize : 100;
+            this.reset();
+            if (this.maxSize <= 0) {
+                this.set = this.get = $.noop;
+            }
+        }
+        _.mixin(LruCache.prototype, {
+            set: function set(key, val) {
+                var tailItem = this.list.tail, node;
+                if (this.size >= this.maxSize) {
+                    this.list.remove(tailItem);
+                    delete this.hash[tailItem.key];
+                    this.size--;
+                }
+                if (node = this.hash[key]) {
+                    node.val = val;
+                    this.list.moveToFront(node);
+                } else {
+                    node = new Node(key, val);
+                    this.list.add(node);
+                    this.hash[key] = node;
+                    this.size++;
+                }
+            },
+            get: function get(key) {
+                var node = this.hash[key];
+                if (node) {
+                    this.list.moveToFront(node);
+                    return node.val;
+                }
+            },
+            reset: function reset() {
+                this.size = 0;
+                this.hash = {};
+                this.list = new List();
+            }
+        });
+        function List() {
+            this.head = this.tail = null;
+        }
+        _.mixin(List.prototype, {
+            add: function add(node) {
+                if (this.head) {
+                    node.next = this.head;
+                    this.head.prev = node;
+                }
+                this.head = node;
+                this.tail = this.tail || node;
+            },
+            remove: function remove(node) {
+                node.prev ? node.prev.next = node.next : this.head = node.next;
+                node.next ? node.next.prev = node.prev : this.tail = node.prev;
+            },
+            moveToFront: function(node) {
+                this.remove(node);
+                this.add(node);
+            }
+        });
+        function Node(key, val) {
+            this.key = key;
+            this.val = val;
+            this.prev = this.next = null;
+        }
+        return LruCache;
+    }();
+    var PersistentStorage = function() {
+        "use strict";
+        var LOCAL_STORAGE;
+        try {
+            LOCAL_STORAGE = window.localStorage;
+            LOCAL_STORAGE.setItem("~~~", "!");
+            LOCAL_STORAGE.removeItem("~~~");
+        } catch (err) {
+            LOCAL_STORAGE = null;
+        }
+        function PersistentStorage(namespace, override) {
+            this.prefix = [ "__", namespace, "__" ].join("");
+            this.ttlKey = "__ttl__";
+            this.keyMatcher = new RegExp("^" + _.escapeRegExChars(this.prefix));
+            this.ls = override || LOCAL_STORAGE;
+            !this.ls && this._noop();
+        }
+        _.mixin(PersistentStorage.prototype, {
+            _prefix: function(key) {
+                return this.prefix + key;
+            },
+            _ttlKey: function(key) {
+                return this._prefix(key) + this.ttlKey;
+            },
+            _noop: function() {
+                this.get = this.set = this.remove = this.clear = this.isExpired = _.noop;
+            },
+            _safeSet: function(key, val) {
+                try {
+                    this.ls.setItem(key, val);
+                } catch (err) {
+                    if (err.name === "QuotaExceededError") {
+                        this.clear();
+                        this._noop();
+                    }
+                }
+            },
+            get: function(key) {
+                if (this.isExpired(key)) {
+                    this.remove(key);
+                }
+                return decode(this.ls.getItem(this._prefix(key)));
+            },
+            set: function(key, val, ttl) {
+                if (_.isNumber(ttl)) {
+                    this._safeSet(this._ttlKey(key), encode(now() + ttl));
+                } else {
+                    this.ls.removeItem(this._ttlKey(key));
+                }
+                return this._safeSet(this._prefix(key), encode(val));
+            },
+            remove: function(key) {
+                this.ls.removeItem(this._ttlKey(key));
+                this.ls.removeItem(this._prefix(key));
+                return this;
+            },
+            clear: function() {
+                var i, keys = gatherMatchingKeys(this.keyMatcher);
+                for (i = keys.length; i--; ) {
+                    this.remove(keys[i]);
+                }
+                return this;
+            },
+            isExpired: function(key) {
+                var ttl = decode(this.ls.getItem(this._ttlKey(key)));
+                return _.isNumber(ttl) && now() > ttl ? true : false;
+            }
+        });
+        return PersistentStorage;
+        function now() {
+            return new Date().getTime();
+        }
+        function encode(val) {
+            return JSON.stringify(_.isUndefined(val) ? null : val);
+        }
+        function decode(val) {
+            return $.parseJSON(val);
+        }
+        function gatherMatchingKeys(keyMatcher) {
+            var i, key, keys = [], len = LOCAL_STORAGE.length;
+            for (i = 0; i < len; i++) {
+                if ((key = LOCAL_STORAGE.key(i)).match(keyMatcher)) {
+                    keys.push(key.replace(keyMatcher, ""));
+                }
+            }
+            return keys;
+        }
+    }();
+    var Transport = function() {
+        "use strict";
+        var pendingRequestsCount = 0, pendingRequests = {}, maxPendingRequests = 6, sharedCache = new LruCache(10);
+        function Transport(o) {
+            o = o || {};
+            this.cancelled = false;
+            this.lastReq = null;
+            this._send = o.transport;
+            this._get = o.limiter ? o.limiter(this._get) : this._get;
+            this._cache = o.cache === false ? new LruCache(0) : sharedCache;
+        }
+        Transport.setMaxPendingRequests = function setMaxPendingRequests(num) {
+            maxPendingRequests = num;
+        };
+        Transport.resetCache = function resetCache() {
+            sharedCache.reset();
+        };
+        _.mixin(Transport.prototype, {
+            _fingerprint: function fingerprint(o) {
+                o = o || {};
+                return o.url + o.type + $.param(o.data || {});
+            },
+            _get: function(o, cb) {
+                var that = this, fingerprint, jqXhr;
+                fingerprint = this._fingerprint(o);
+                if (this.cancelled || fingerprint !== this.lastReq) {
+                    return;
+                }
+                if (jqXhr = pendingRequests[fingerprint]) {
+                    jqXhr.done(done).fail(fail);
+                } else if (pendingRequestsCount < maxPendingRequests) {
+                    pendingRequestsCount++;
+                    pendingRequests[fingerprint] = this._send(o).done(done).fail(fail).always(always);
+                } else {
+                    this.onDeckRequestArgs = [].slice.call(arguments, 0);
+                }
+                function done(resp) {
+                    cb(null, resp);
+                    that._cache.set(fingerprint, resp);
+                }
+                function fail() {
+                    cb(true);
+                }
+                function always() {
+                    pendingRequestsCount--;
+                    delete pendingRequests[fingerprint];
+                    if (that.onDeckRequestArgs) {
+                        that._get.apply(that, that.onDeckRequestArgs);
+                        that.onDeckRequestArgs = null;
+                    }
+                }
+            },
+            get: function(o, cb) {
+                var resp, fingerprint;
+                cb = cb || $.noop;
+                o = _.isString(o) ? {
+                    url: o
+                } : o || {};
+                fingerprint = this._fingerprint(o);
+                this.cancelled = false;
+                this.lastReq = fingerprint;
+                if (resp = this._cache.get(fingerprint)) {
+                    cb(null, resp);
+                } else {
+                    this._get(o, cb);
+                }
+            },
+            cancel: function() {
+                this.cancelled = true;
+            }
+        });
+        return Transport;
+    }();
+    var SearchIndex = window.SearchIndex = function() {
+        "use strict";
+        var CHILDREN = "c", IDS = "i";
+        function SearchIndex(o) {
+            o = o || {};
+            if (!o.datumTokenizer || !o.queryTokenizer) {
+                $.error("datumTokenizer and queryTokenizer are both required");
+            }
+            this.identify = o.identify || _.stringify;
+            this.datumTokenizer = o.datumTokenizer;
+            this.queryTokenizer = o.queryTokenizer;
+            this.reset();
+        }
+        _.mixin(SearchIndex.prototype, {
+            bootstrap: function bootstrap(o) {
+                this.datums = o.datums;
+                this.trie = o.trie;
+            },
+            add: function(data) {
+                var that = this;
+                data = _.isArray(data) ? data : [ data ];
+                _.each(data, function(datum) {
+                    var id, tokens;
+                    that.datums[id = that.identify(datum)] = datum;
+                    tokens = normalizeTokens(that.datumTokenizer(datum));
+                    _.each(tokens, function(token) {
+                        var node, chars, ch;
+                        node = that.trie;
+                        chars = token.split("");
+                        while (ch = chars.shift()) {
+                            node = node[CHILDREN][ch] || (node[CHILDREN][ch] = newNode());
+                            node[IDS].push(id);
+                        }
+                    });
+                });
+            },
+            get: function get(ids) {
+                var that = this;
+                return _.map(ids, function(id) {
+                    return that.datums[id];
+                });
+            },
+            search: function search(query) {
+                var that = this, tokens, matches;
+                tokens = normalizeTokens(this.queryTokenizer(query));
+                _.each(tokens, function(token) {
+                    var node, chars, ch, ids;
+                    if (matches && matches.length === 0) {
+                        return false;
+                    }
+                    node = that.trie;
+                    chars = token.split("");
+                    while (node && (ch = chars.shift())) {
+                        node = node[CHILDREN][ch];
+                    }
+                    if (node && chars.length === 0) {
+                        ids = node[IDS].slice(0);
+                        matches = matches ? getIntersection(matches, ids) : ids;
+                    } else {
+                        matches = [];
+                        return false;
+                    }
+                });
+                return matches ? _.map(unique(matches), function(id) {
+                    return that.datums[id];
+                }) : [];
+            },
+            all: function all() {
+                var values = [];
+                for (var key in this.datums) {
+                    values.push(this.datums[key]);
+                }
+                return values;
+            },
+            reset: function reset() {
+                this.datums = {};
+                this.trie = newNode();
+            },
+            serialize: function serialize() {
+                return {
+                    datums: this.datums,
+                    trie: this.trie
+                };
+            }
+        });
+        return SearchIndex;
+        function normalizeTokens(tokens) {
+            tokens = _.filter(tokens, function(token) {
+                return !!token;
+            });
+            tokens = _.map(tokens, function(token) {
+                return token.toLowerCase();
+            });
+            return tokens;
+        }
+        function newNode() {
+            var node = {};
+            node[IDS] = [];
+            node[CHILDREN] = {};
+            return node;
+        }
+        function unique(array) {
+            var seen = {}, uniques = [];
+            for (var i = 0, len = array.length; i < len; i++) {
+                if (!seen[array[i]]) {
+                    seen[array[i]] = true;
+                    uniques.push(array[i]);
+                }
+            }
+            return uniques;
+        }
+        function getIntersection(arrayA, arrayB) {
+            var ai = 0, bi = 0, intersection = [];
+            arrayA = arrayA.sort();
+            arrayB = arrayB.sort();
+            var lenArrayA = arrayA.length, lenArrayB = arrayB.length;
+            while (ai < lenArrayA && bi < lenArrayB) {
+                if (arrayA[ai] < arrayB[bi]) {
+                    ai++;
+                } else if (arrayA[ai] > arrayB[bi]) {
+                    bi++;
+                } else {
+                    intersection.push(arrayA[ai]);
+                    ai++;
+                    bi++;
+                }
+            }
+            return intersection;
+        }
+    }();
+    var Prefetch = function() {
+        "use strict";
+        var keys;
+        keys = {
+            data: "data",
+            protocol: "protocol",
+            thumbprint: "thumbprint"
+        };
+        function Prefetch(o) {
+            this.url = o.url;
+            this.ttl = o.ttl;
+            this.cache = o.cache;
+            this.prepare = o.prepare;
+            this.transform = o.transform;
+            this.transport = o.transport;
+            this.thumbprint = o.thumbprint;
+            this.storage = new PersistentStorage(o.cacheKey);
+        }
+        _.mixin(Prefetch.prototype, {
+            _settings: function settings() {
+                return {
+                    url: this.url,
+                    type: "GET",
+                    dataType: "json"
+                };
+            },
+            store: function store(data) {
+                if (!this.cache) {
+                    return;
+                }
+                this.storage.set(keys.data, data, this.ttl);
+                this.storage.set(keys.protocol, location.protocol, this.ttl);
+                this.storage.set(keys.thumbprint, this.thumbprint, this.ttl);
+            },
+            fromCache: function fromCache() {
+                var stored = {}, isExpired;
+                if (!this.cache) {
+                    return null;
+                }
+                stored.data = this.storage.get(keys.data);
+                stored.protocol = this.storage.get(keys.protocol);
+                stored.thumbprint = this.storage.get(keys.thumbprint);
+                isExpired = stored.thumbprint !== this.thumbprint || stored.protocol !== location.protocol;
+                return stored.data && !isExpired ? stored.data : null;
+            },
+            fromNetwork: function(cb) {
+                var that = this, settings;
+                if (!cb) {
+                    return;
+                }
+                settings = this.prepare(this._settings());
+                this.transport(settings).fail(onError).done(onResponse);
+                function onError() {
+                    cb(true);
+                }
+                function onResponse(resp) {
+                    cb(null, that.transform(resp));
+                }
+            },
+            clear: function clear() {
+                this.storage.clear();
+                return this;
+            }
+        });
+        return Prefetch;
+    }();
+    var Remote = function() {
+        "use strict";
+        function Remote(o) {
+            this.url = o.url;
+            this.prepare = o.prepare;
+            this.transform = o.transform;
+            this.transport = new Transport({
+                cache: o.cache,
+                limiter: o.limiter,
+                transport: o.transport
+            });
+        }
+        _.mixin(Remote.prototype, {
+            _settings: function settings() {
+                return {
+                    url: this.url,
+                    type: "GET",
+                    dataType: "json"
+                };
+            },
+            get: function get(query, cb) {
+                var that = this, settings;
+                if (!cb) {
+                    return;
+                }
+                query = query || "";
+                settings = this.prepare(query, this._settings());
+                return this.transport.get(settings, onResponse);
+                function onResponse(err, resp) {
+                    err ? cb([]) : cb(that.transform(resp));
+                }
+            },
+            cancelLastRequest: function cancelLastRequest() {
+                this.transport.cancel();
+            }
+        });
+        return Remote;
+    }();
+    var oParser = function() {
+        "use strict";
+        return function parse(o) {
+            var defaults, sorter;
+            defaults = {
+                initialize: true,
+                identify: _.stringify,
+                datumTokenizer: null,
+                queryTokenizer: null,
+                sufficient: 5,
+                sorter: null,
+                local: [],
+                prefetch: null,
+                remote: null
+            };
+            o = _.mixin(defaults, o || {});
+            !o.datumTokenizer && $.error("datumTokenizer is required");
+            !o.queryTokenizer && $.error("queryTokenizer is required");
+            sorter = o.sorter;
+            o.sorter = sorter ? function(x) {
+                return x.sort(sorter);
+            } : _.identity;
+            o.local = _.isFunction(o.local) ? o.local() : o.local;
+            o.prefetch = parsePrefetch(o.prefetch);
+            o.remote = parseRemote(o.remote);
+            return o;
+        };
+        function parsePrefetch(o) {
+            var defaults;
+            if (!o) {
+                return null;
+            }
+            defaults = {
+                url: null,
+                ttl: 24 * 60 * 60 * 1e3,
+                cache: true,
+                cacheKey: null,
+                thumbprint: "",
+                prepare: _.identity,
+                transform: _.identity,
+                transport: null
+            };
+            o = _.isString(o) ? {
+                url: o
+            } : o;
+            o = _.mixin(defaults, o);
+            !o.url && $.error("prefetch requires url to be set");
+            o.transform = o.filter || o.transform;
+            o.cacheKey = o.cacheKey || o.url;
+            o.thumbprint = VERSION + o.thumbprint;
+            o.transport = o.transport ? callbackToDeferred(o.transport) : $.ajax;
+            return o;
+        }
+        function parseRemote(o) {
+            var defaults;
+            if (!o) {
+                return;
+            }
+            defaults = {
+                url: null,
+                cache: true,
+                prepare: null,
+                replace: null,
+                wildcard: null,
+                limiter: null,
+                rateLimitBy: "debounce",
+                rateLimitWait: 300,
+                transform: _.identity,
+                transport: null
+            };
+            o = _.isString(o) ? {
+                url: o
+            } : o;
+            o = _.mixin(defaults, o);
+            !o.url && $.error("remote requires url to be set");
+            o.transform = o.filter || o.transform;
+            o.prepare = toRemotePrepare(o);
+            o.limiter = toLimiter(o);
+            o.transport = o.transport ? callbackToDeferred(o.transport) : $.ajax;
+            delete o.replace;
+            delete o.wildcard;
+            delete o.rateLimitBy;
+            delete o.rateLimitWait;
+            return o;
+        }
+        function toRemotePrepare(o) {
+            var prepare, replace, wildcard;
+            prepare = o.prepare;
+            replace = o.replace;
+            wildcard = o.wildcard;
+            if (prepare) {
+                return prepare;
+            }
+            if (replace) {
+                prepare = prepareByReplace;
+            } else if (o.wildcard) {
+                prepare = prepareByWildcard;
+            } else {
+                prepare = idenityPrepare;
+            }
+            return prepare;
+            function prepareByReplace(query, settings) {
+                settings.url = replace(settings.url, query);
+                return settings;
+            }
+            function prepareByWildcard(query, settings) {
+                settings.url = settings.url.replace(wildcard, encodeURIComponent(query));
+                return settings;
+            }
+            function idenityPrepare(query, settings) {
+                return settings;
+            }
+        }
+        function toLimiter(o) {
+            var limiter, method, wait;
+            limiter = o.limiter;
+            method = o.rateLimitBy;
+            wait = o.rateLimitWait;
+            if (!limiter) {
+                limiter = /^throttle$/i.test(method) ? throttle(wait) : debounce(wait);
+            }
+            return limiter;
+            function debounce(wait) {
+                return function debounce(fn) {
+                    return _.debounce(fn, wait);
+                };
+            }
+            function throttle(wait) {
+                return function throttle(fn) {
+                    return _.throttle(fn, wait);
+                };
+            }
+        }
+        function callbackToDeferred(fn) {
+            return function wrapper(o) {
+                var deferred = $.Deferred();
+                fn(o, onSuccess, onError);
+                return deferred;
+                function onSuccess(resp) {
+                    _.defer(function() {
+                        deferred.resolve(resp);
+                    });
+                }
+                function onError(err) {
+                    _.defer(function() {
+                        deferred.reject(err);
+                    });
+                }
+            };
+        }
+    }();
+    var Bloodhound = function() {
+        "use strict";
+        var old;
+        // Remember any pre-existing global so noConflict() can restore it.
+        old = window && window.Bloodhound;
+        // Suggestion engine combining an in-memory SearchIndex with optional
+        // prefetched and remote data sources (options normalized by oParser).
+        function Bloodhound(o) {
+            o = oParser(o);
+            this.sorter = o.sorter;
+            this.identify = o.identify;
+            this.sufficient = o.sufficient;
+            this.local = o.local;
+            this.remote = o.remote ? new Remote(o.remote) : null;
+            this.prefetch = o.prefetch ? new Prefetch(o.prefetch) : null;
+            this.index = new SearchIndex({
+                identify: this.identify,
+                datumTokenizer: o.datumTokenizer,
+                queryTokenizer: o.queryTokenizer
+            });
+            // Eagerly build the index unless the caller opted out.
+            o.initialize !== false && this.initialize();
+        }
+        Bloodhound.noConflict = function noConflict() {
+            window && (window.Bloodhound = old);
+            return Bloodhound;
+        };
+        Bloodhound.tokenizers = tokenizers;
+        _.mixin(Bloodhound.prototype, {
+            // Adapter consumed by typeahead.js datasets: exposes search()
+            // with or without the async callback, depending on whether a
+            // remote source is configured.
+            __ttAdapter: function ttAdapter() {
+                var that = this;
+                return this.remote ? withAsync : withoutAsync;
+                function withAsync(query, sync, async) {
+                    return that.search(query, sync, async);
+                }
+                function withoutAsync(query, sync) {
+                    return that.search(query, sync);
+                }
+            },
+            // Resolves once prefetched data is in the index: immediately when
+            // there is no prefetch source or a cached serialization exists,
+            // otherwise after the network fetch completes (rejects on error).
+            _loadPrefetch: function loadPrefetch() {
+                var that = this, deferred, serialized;
+                deferred = $.Deferred();
+                if (!this.prefetch) {
+                    deferred.resolve();
+                } else if (serialized = this.prefetch.fromCache()) {
+                    this.index.bootstrap(serialized);
+                    deferred.resolve();
+                } else {
+                    this.prefetch.fromNetwork(done);
+                }
+                return deferred.promise();
+                function done(err, data) {
+                    if (err) {
+                        return deferred.reject();
+                    }
+                    that.add(data);
+                    // Persist the freshly built index for the next page load.
+                    that.prefetch.store(that.index.serialize());
+                    deferred.resolve();
+                }
+            },
+            // Rebuilds the index from scratch: prefetch first, then local data.
+            _initialize: function initialize() {
+                var that = this, deferred;
+                this.clear();
+                (this.initPromise = this._loadPrefetch()).done(addLocalToIndex);
+                return this.initPromise;
+                function addLocalToIndex() {
+                    that.add(that.local);
+                }
+            },
+            // Idempotent unless `force` is truthy; returns the init promise.
+            initialize: function initialize(force) {
+                return !this.initPromise || force ? this._initialize() : this.initPromise;
+            },
+            add: function add(data) {
+                this.index.add(data);
+                return this;
+            },
+            // Accepts an array of ids or ids as separate arguments.
+            get: function get(ids) {
+                ids = _.isArray(ids) ? ids : [].slice.call(arguments, 0);
+                return this.index.get(ids);
+            },
+            // Delivers sorted local matches synchronously via `sync`; when the
+            // local results are insufficient, queries the remote source and
+            // delivers deduplicated extras via `async`.
+            search: function search(query, sync, async) {
+                var that = this, local;
+                local = this.sorter(this.index.search(query));
+                // Copy before handing out so remote dedup below sees a stable list.
+                sync(this.remote ? local.slice() : local);
+                if (this.remote && local.length < this.sufficient) {
+                    this.remote.get(query, processRemote);
+                } else if (this.remote) {
+                    this.remote.cancelLastRequest();
+                }
+                return this;
+                function processRemote(remote) {
+                    var nonDuplicates = [];
+                    // Drop remote results already present in the local set.
+                    _.each(remote, function(r) {
+                        !_.some(local, function(l) {
+                            return that.identify(r) === that.identify(l);
+                        }) && nonDuplicates.push(r);
+                    });
+                    async && async(nonDuplicates);
+                }
+            },
+            all: function all() {
+                return this.index.all();
+            },
+            clear: function clear() {
+                this.index.reset();
+                return this;
+            },
+            clearPrefetchCache: function clearPrefetchCache() {
+                this.prefetch && this.prefetch.clear();
+                return this;
+            },
+            // NOTE(review): clears the cache shared by ALL Transport users,
+            // not just this instance's remote — confirm that is intended.
+            clearRemoteCache: function clearRemoteCache() {
+                Transport.resetCache();
+                return this;
+            },
+            ttAdapter: function ttAdapter() {
+                return this.__ttAdapter();
+            }
+        });
+        return Bloodhound;
+    }();
+    return Bloodhound;
+});
+
+(function(root, factory) {
+    if (typeof define === "function" && define.amd) {
+        define("typeahead.js", [ "jquery" ], function(a0) {
+            return factory(a0);
+        });
+    } else if (typeof exports === "object") {
+        module.exports = factory(require("jquery"));
+    } else {
+        factory(jQuery);
+    }
+})(this, function($) {
+    var _ = function() {
+        "use strict";
+        // Utility belt backed by jQuery, shared by every component of the
+        // bundle. Mostly thin aliases plus a few timing helpers.
+        return {
+            // Returns the matched IE/Trident version fragment (a string) when
+            // the user agent looks like Internet Explorer, otherwise false.
+            isMsie: function() {
+                return /(msie|trident)/i.test(navigator.userAgent) ? navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false;
+            },
+            isBlankString: function(str) {
+                return !str || /^\s*$/.test(str);
+            },
+            // Backslash-escapes regex metacharacters so `str` can be embedded
+            // literally inside a RegExp.
+            escapeRegExChars: function(str) {
+                return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
+            },
+            isString: function(obj) {
+                return typeof obj === "string";
+            },
+            isNumber: function(obj) {
+                return typeof obj === "number";
+            },
+            isArray: $.isArray,
+            isFunction: $.isFunction,
+            isObject: $.isPlainObject,
+            isUndefined: function(obj) {
+                return typeof obj === "undefined";
+            },
+            isElement: function(obj) {
+                return !!(obj && obj.nodeType === 1);
+            },
+            isJQuery: function(obj) {
+                return obj instanceof $;
+            },
+            // Coerces any value to a string; undefined/null become "".
+            toStr: function toStr(s) {
+                return _.isUndefined(s) || s === null ? "" : s + "";
+            },
+            bind: $.proxy,
+            // Like $.each but calls cb(value, index) instead of (index, value).
+            each: function(collection, cb) {
+                $.each(collection, reverseArgs);
+                function reverseArgs(index, value) {
+                    return cb(value, index);
+                }
+            },
+            map: $.map,
+            filter: $.grep,
+            // True when test(val, key, obj) is truthy for every entry.
+            // (Assignment inside the condition is intentional: a falsy result
+            // stops iteration early by returning false to $.each.)
+            every: function(obj, test) {
+                var result = true;
+                if (!obj) {
+                    return result;
+                }
+                $.each(obj, function(key, val) {
+                    if (!(result = test.call(null, val, key, obj))) {
+                        return false;
+                    }
+                });
+                return !!result;
+            },
+            // True when test(val, key, obj) is truthy for at least one entry;
+            // stops iterating as soon as a match is found.
+            some: function(obj, test) {
+                var result = false;
+                if (!obj) {
+                    return result;
+                }
+                $.each(obj, function(key, val) {
+                    if (result = test.call(null, val, key, obj)) {
+                        return false;
+                    }
+                });
+                return !!result;
+            },
+            mixin: $.extend,
+            identity: function(x) {
+                return x;
+            },
+            // Deep copy via jQuery's deep extend.
+            clone: function(obj) {
+                return $.extend(true, {}, obj);
+            },
+            // Returns a counter function producing 0, 1, 2, ... per generator.
+            getIdGenerator: function() {
+                var counter = 0;
+                return function() {
+                    return counter++;
+                };
+            },
+            // Functions pass through; anything else becomes a function that
+            // returns its string form.
+            templatify: function templatify(obj) {
+                return $.isFunction(obj) ? obj : template;
+                function template() {
+                    return String(obj);
+                }
+            },
+            defer: function(fn) {
+                setTimeout(fn, 0);
+            },
+            // Classic debounce: delays `func` until `wait` ms after the most
+            // recent call; with `immediate`, fires on the leading edge and
+            // suppresses the trailing call instead.
+            debounce: function(func, wait, immediate) {
+                var timeout, result;
+                return function() {
+                    var context = this, args = arguments, later, callNow;
+                    later = function() {
+                        timeout = null;
+                        if (!immediate) {
+                            result = func.apply(context, args);
+                        }
+                    };
+                    callNow = immediate && !timeout;
+                    clearTimeout(timeout);
+                    timeout = setTimeout(later, wait);
+                    if (callNow) {
+                        result = func.apply(context, args);
+                    }
+                    return result;
+                };
+            },
+            // Classic throttle: invokes `func` at most once every `wait` ms,
+            // scheduling a trailing invocation for calls made in between.
+            throttle: function(func, wait) {
+                var context, args, timeout, result, previous, later;
+                previous = 0;
+                later = function() {
+                    previous = new Date();
+                    timeout = null;
+                    result = func.apply(context, args);
+                };
+                return function() {
+                    var now = new Date(), remaining = wait - (now - previous);
+                    context = this;
+                    args = arguments;
+                    if (remaining <= 0) {
+                        clearTimeout(timeout);
+                        timeout = null;
+                        previous = now;
+                        result = func.apply(context, args);
+                    } else if (!timeout) {
+                        timeout = setTimeout(later, remaining);
+                    }
+                    return result;
+                };
+            },
+            // Strings pass through untouched; other values are JSON-encoded.
+            stringify: function(val) {
+                return _.isString(val) ? val : JSON.stringify(val);
+            },
+            noop: function() {}
+        };
+    }();
+    var WWW = function() {
+        "use strict";
+        var defaultClassNames = {
+            wrapper: "twitter-typeahead",
+            input: "tt-input",
+            hint: "tt-hint",
+            menu: "tt-menu",
+            dataset: "tt-dataset",
+            suggestion: "tt-suggestion",
+            selectable: "tt-selectable",
+            empty: "tt-empty",
+            open: "tt-open",
+            cursor: "tt-cursor",
+            highlight: "tt-highlight"
+        };
+        return build;
+        function build(o) {
+            var www, classes;
+            classes = _.mixin({}, defaultClassNames, o);
+            www = {
+                css: buildCss(),
+                classes: classes,
+                html: buildHtml(classes),
+                selectors: buildSelectors(classes)
+            };
+            return {
+                css: www.css,
+                html: www.html,
+                classes: www.classes,
+                selectors: www.selectors,
+                mixin: function(o) {
+                    _.mixin(o, www);
+                }
+            };
+        }
+        function buildHtml(c) {
+            return {
+                wrapper: '<span class="' + c.wrapper + '"></span>',
+                menu: '<div class="' + c.menu + '"></div>'
+            };
+        }
+        function buildSelectors(classes) {
+            var selectors = {};
+            _.each(classes, function(v, k) {
+                selectors[k] = "." + v;
+            });
+            return selectors;
+        }
+        function buildCss() {
+            var css = {
+                wrapper: {
+                    position: "relative",
+                    display: "inline-block"
+                },
+                hint: {
+                    position: "absolute",
+                    top: "0",
+                    left: "0",
+                    borderColor: "transparent",
+                    boxShadow: "none",
+                    opacity: "1"
+                },
+                input: {
+                    position: "relative",
+                    verticalAlign: "top",
+                    backgroundColor: "transparent"
+                },
+                inputWithNoHint: {
+                    position: "relative",
+                    verticalAlign: "top"
+                },
+                menu: {
+                    position: "absolute",
+                    top: "100%",
+                    left: "0",
+                    zIndex: "100",
+                    display: "none"
+                },
+                ltr: {
+                    left: "0",
+                    right: "auto"
+                },
+                rtl: {
+                    left: "auto",
+                    right: " 0"
+                }
+            };
+            if (_.isMsie()) {
+                _.mixin(css.input, {
+                    backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)"
+                });
+            }
+            return css;
+        }
+    }();
+    var EventBus = function() {
+        "use strict";
+        var namespace, deprecationMap;
+        namespace = "typeahead:";
+        deprecationMap = {
+            render: "rendered",
+            cursorchange: "cursorchanged",
+            select: "selected",
+            autocomplete: "autocompleted"
+        };
+        function EventBus(o) {
+            if (!o || !o.el) {
+                $.error("EventBus initialized without el");
+            }
+            this.$el = $(o.el);
+        }
+        _.mixin(EventBus.prototype, {
+            _trigger: function(type, args) {
+                var $e;
+                $e = $.Event(namespace + type);
+                (args = args || []).unshift($e);
+                this.$el.trigger.apply(this.$el, args);
+                return $e;
+            },
+            before: function(type) {
+                var args, $e;
+                args = [].slice.call(arguments, 1);
+                $e = this._trigger("before" + type, args);
+                return $e.isDefaultPrevented();
+            },
+            trigger: function(type) {
+                var deprecatedType;
+                this._trigger(type, [].slice.call(arguments, 1));
+                if (deprecatedType = deprecationMap[type]) {
+                    this._trigger(deprecatedType, [].slice.call(arguments, 1));
+                }
+            }
+        });
+        return EventBus;
+    }();
+    var EventEmitter = function() {
+        "use strict";
+        // Mixin granting named-event support with two callback queues per
+        // event type: "sync" (run inline) and "async" (run on the next tick).
+        var splitter = /\s+/, nextTick = getNextTick();
+        return {
+            onSync: onSync,
+            onAsync: onAsync,
+            off: off,
+            trigger: trigger
+        };
+        // Shared registration helper; `method` selects the sync/async queue.
+        // `types` may list several space-separated event names.
+        function on(method, types, cb, context) {
+            var type;
+            if (!cb) {
+                return this;
+            }
+            types = types.split(splitter);
+            cb = context ? bindContext(cb, context) : cb;
+            this._callbacks = this._callbacks || {};
+            while (type = types.shift()) {
+                this._callbacks[type] = this._callbacks[type] || {
+                    sync: [],
+                    async: []
+                };
+                this._callbacks[type][method].push(cb);
+            }
+            return this;
+        }
+        function onAsync(types, cb, context) {
+            return on.call(this, "async", types, cb, context);
+        }
+        function onSync(types, cb, context) {
+            return on.call(this, "sync", types, cb, context);
+        }
+        // Removes ALL callbacks (both queues) for each listed type.
+        function off(types) {
+            var type;
+            if (!this._callbacks) {
+                return this;
+            }
+            types = types.split(splitter);
+            while (type = types.shift()) {
+                delete this._callbacks[type];
+            }
+            return this;
+        }
+        // Fires each listed type with (type, ...args). Note the loop condition:
+        // iteration stops at the first type that has no registered callbacks.
+        // If any sync callback returns false, the async flush for that type is
+        // skipped entirely (syncFlush() returns false).
+        function trigger(types) {
+            var type, callbacks, args, syncFlush, asyncFlush;
+            if (!this._callbacks) {
+                return this;
+            }
+            types = types.split(splitter);
+            args = [].slice.call(arguments, 1);
+            while ((type = types.shift()) && (callbacks = this._callbacks[type])) {
+                syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args));
+                asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args));
+                syncFlush() && nextTick(asyncFlush);
+            }
+            return this;
+        }
+        // Returns a runner that invokes the callbacks in order, aborting as
+        // soon as one returns exactly false; reports whether none cancelled.
+        function getFlush(callbacks, context, args) {
+            return flush;
+            function flush() {
+                var cancelled;
+                for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) {
+                    cancelled = callbacks[i].apply(context, args) === false;
+                }
+                return !cancelled;
+            }
+        }
+        // Prefers setImmediate when the host provides it; otherwise falls
+        // back to a zero-delay setTimeout.
+        function getNextTick() {
+            var nextTickFn;
+            if (window.setImmediate) {
+                nextTickFn = function nextTickSetImmediate(fn) {
+                    setImmediate(function() {
+                        fn();
+                    });
+                };
+            } else {
+                nextTickFn = function nextTickSetTimeout(fn) {
+                    setTimeout(function() {
+                        fn();
+                    }, 0);
+                };
+            }
+            return nextTickFn;
+        }
+        // Function.prototype.bind with a fallback for engines lacking it.
+        function bindContext(fn, context) {
+            return fn.bind ? fn.bind(context) : function() {
+                fn.apply(context, [].slice.call(arguments, 0));
+            };
+        }
+    }();
+    var highlight = function(doc) {
+        "use strict";
+        // Wraps every substring of o.node's text content matching o.pattern
+        // in a new element (default <strong>), e.g. to bold the typed query
+        // inside rendered suggestions.
+        var defaults = {
+            node: null,
+            pattern: null,
+            tagName: "strong",
+            className: null,
+            wordsOnly: false,
+            caseSensitive: false
+        };
+        return function hightlight(o) {
+            var regex;
+            o = _.mixin({}, defaults, o);
+            if (!o.node || !o.pattern) {
+                return;
+            }
+            // Accept a single pattern or an array of patterns.
+            o.pattern = _.isArray(o.pattern) ? o.pattern : [ o.pattern ];
+            regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly);
+            traverse(o.node, hightlightTextNode);
+            // Splits the first match out of `textNode` into its own text node
+            // (via splitText) and swaps that node for the wrapper element.
+            // Returns whether a match was wrapped.
+            function hightlightTextNode(textNode) {
+                var match, patternNode, wrapperNode;
+                if (match = regex.exec(textNode.data)) {
+                    wrapperNode = doc.createElement(o.tagName);
+                    o.className && (wrapperNode.className = o.className);
+                    patternNode = textNode.splitText(match.index);
+                    patternNode.splitText(match[0].length);
+                    wrapperNode.appendChild(patternNode.cloneNode(true));
+                    textNode.parentNode.replaceChild(wrapperNode, patternNode);
+                }
+                return !!match;
+            }
+            // Depth-first walk over text nodes. When a node was highlighted,
+            // skip one extra index: splitText inserted a sibling after it.
+            function traverse(el, hightlightTextNode) {
+                var childNode, TEXT_NODE_TYPE = 3;
+                for (var i = 0; i < el.childNodes.length; i++) {
+                    childNode = el.childNodes[i];
+                    if (childNode.nodeType === TEXT_NODE_TYPE) {
+                        i += hightlightTextNode(childNode) ? 1 : 0;
+                    } else {
+                        traverse(childNode, hightlightTextNode);
+                    }
+                }
+            }
+        };
+        // Builds one alternation regex over the escaped patterns, optionally
+        // anchored at word boundaries and/or made case-insensitive.
+        function getRegex(patterns, caseSensitive, wordsOnly) {
+            var escapedPatterns = [], regexStr;
+            for (var i = 0, len = patterns.length; i < len; i++) {
+                escapedPatterns.push(_.escapeRegExChars(patterns[i]));
+            }
+            regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")";
+            return caseSensitive ? new RegExp(regexStr) : new RegExp(regexStr, "i");
+        }
+    }(window.document);
+    var Input = function() {
+        "use strict";
+        // Wraps the real <input> (plus an optional hint input) and turns raw
+        // DOM events into the higher-level events the typeahead machinery
+        // consumes (queryChanged, <key>Keyed, focused, blurred, ...).
+        var specialKeyCodeMap;
+        specialKeyCodeMap = {
+            9: "tab",
+            27: "esc",
+            37: "left",
+            39: "right",
+            13: "enter",
+            38: "up",
+            40: "down"
+        };
+        function Input(o, www) {
+            o = o || {};
+            if (!o.input) {
+                $.error("input is missing");
+            }
+            www.mixin(this);
+            this.$hint = $(o.hint);
+            this.$input = $(o.input);
+            this.query = this.$input.val();
+            this.queryWhenFocused = this.hasFocus() ? this.query : null;
+            this.$overflowHelper = buildOverflowHelper(this.$input);
+            this._checkLanguageDirection();
+            // Without a hint element, all hint operations become no-ops.
+            if (this.$hint.length === 0) {
+                this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop;
+            }
+        }
+        // Strips leading whitespace and collapses internal runs of
+        // whitespace to single spaces for query comparison.
+        Input.normalizeQuery = function(str) {
+            return _.toStr(str).replace(/^\s*/g, "").replace(/\s{2,}/g, " ");
+        };
+        _.mixin(Input.prototype, EventEmitter, {
+            _onBlur: function onBlur() {
+                this.resetInputValue();
+                this.trigger("blurred");
+            },
+            _onFocus: function onFocus() {
+                this.queryWhenFocused = this.query;
+                this.trigger("focused");
+            },
+            // Maps special keys to "<name>Keyed" events, e.g. "enterKeyed".
+            _onKeydown: function onKeydown($e) {
+                var keyName = specialKeyCodeMap[$e.which || $e.keyCode];
+                this._managePreventDefault(keyName, $e);
+                if (keyName && this._shouldTrigger(keyName, $e)) {
+                    this.trigger(keyName + "Keyed", $e);
+                }
+            },
+            _onInput: function onInput() {
+                this._setQuery(this.getInputValue());
+                this.clearHintIfInvalid();
+                this._checkLanguageDirection();
+            },
+            // Swallow up/down default behavior (caret movement) unless a
+            // modifier key is held.
+            _managePreventDefault: function managePreventDefault(keyName, $e) {
+                var preventDefault;
+                switch (keyName) {
+                  case "up":
+                  case "down":
+                    preventDefault = !withModifier($e);
+                    break;
+
+                  default:
+                    preventDefault = false;
+                }
+                preventDefault && $e.preventDefault();
+            },
+            // Tab with a modifier is left to the browser; everything else
+            // produces an event.
+            _shouldTrigger: function shouldTrigger(keyName, $e) {
+                var trigger;
+                switch (keyName) {
+                  case "tab":
+                    trigger = !withModifier($e);
+                    break;
+
+                  default:
+                    trigger = true;
+                }
+                return trigger;
+            },
+            // Tracks the input's ltr/rtl direction, mirroring it onto the
+            // hint and announcing changes.
+            _checkLanguageDirection: function checkLanguageDirection() {
+                var dir = (this.$input.css("direction") || "ltr").toLowerCase();
+                if (this.dir !== dir) {
+                    this.dir = dir;
+                    this.$hint.attr("dir", dir);
+                    this.trigger("langDirChanged", dir);
+                }
+            },
+            // Updates the stored query; fires queryChanged for substantive
+            // changes, whitespaceChanged when only whitespace differs.
+            _setQuery: function setQuery(val, silent) {
+                var areEquivalent, hasDifferentWhitespace;
+                areEquivalent = areQueriesEquivalent(val, this.query);
+                hasDifferentWhitespace = areEquivalent ? this.query.length !== val.length : false;
+                this.query = val;
+                if (!silent && !areEquivalent) {
+                    this.trigger("queryChanged", this.query);
+                } else if (!silent && hasDifferentWhitespace) {
+                    this.trigger("whitespaceChanged", this.query);
+                }
+            },
+            // Attaches all DOM listeners (namespaced ".tt").
+            bind: function() {
+                var that = this, onBlur, onFocus, onKeydown, onInput;
+                onBlur = _.bind(this._onBlur, this);
+                onFocus = _.bind(this._onFocus, this);
+                onKeydown = _.bind(this._onKeydown, this);
+                onInput = _.bind(this._onInput, this);
+                this.$input.on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown);
+                if (!_.isMsie() || _.isMsie() > 9) {
+                    this.$input.on("input.tt", onInput);
+                } else {
+                    // IE <= 9 lacks a usable "input" event; approximate it
+                    // from key/clipboard events, deferred so the value has
+                    // been updated by the time _onInput reads it.
+                    this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) {
+                        if (specialKeyCodeMap[$e.which || $e.keyCode]) {
+                            return;
+                        }
+                        _.defer(_.bind(that._onInput, that, $e));
+                    });
+                }
+                return this;
+            },
+            focus: function focus() {
+                this.$input.focus();
+            },
+            blur: function blur() {
+                this.$input.blur();
+            },
+            getLangDir: function getLangDir() {
+                return this.dir;
+            },
+            getQuery: function getQuery() {
+                return this.query || "";
+            },
+            setQuery: function setQuery(val, silent) {
+                this.setInputValue(val);
+                this._setQuery(val, silent);
+            },
+            hasQueryChangedSinceLastFocus: function hasQueryChangedSinceLastFocus() {
+                return this.query !== this.queryWhenFocused;
+            },
+            getInputValue: function getInputValue() {
+                return this.$input.val();
+            },
+            setInputValue: function setInputValue(value) {
+                this.$input.val(value);
+                this.clearHintIfInvalid();
+                this._checkLanguageDirection();
+            },
+            resetInputValue: function resetInputValue() {
+                this.setInputValue(this.query);
+            },
+            getHint: function getHint() {
+                return this.$hint.val();
+            },
+            setHint: function setHint(value) {
+                this.$hint.val(value);
+            },
+            clearHint: function clearHint() {
+                this.setHint("");
+            },
+            // A hint is only valid while the typed value is a strict prefix
+            // of it and the text does not overflow the input.
+            clearHintIfInvalid: function clearHintIfInvalid() {
+                var val, hint, valIsPrefixOfHint, isValid;
+                val = this.getInputValue();
+                hint = this.getHint();
+                valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0;
+                isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow();
+                !isValid && this.clearHint();
+            },
+            hasFocus: function hasFocus() {
+                return this.$input.is(":focus");
+            },
+            // Measures the current value with the hidden helper element;
+            // the 2px slack presumably accounts for borders — unexplained
+            // in the original.
+            hasOverflow: function hasOverflow() {
+                var constraint = this.$input.width() - 2;
+                this.$overflowHelper.text(this.getInputValue());
+                return this.$overflowHelper.width() >= constraint;
+            },
+            // Uses selectionStart where supported, the legacy IE range API
+            // otherwise; defaults to true when neither is available.
+            isCursorAtEnd: function() {
+                var valueLength, selectionStart, range;
+                valueLength = this.$input.val().length;
+                selectionStart = this.$input[0].selectionStart;
+                if (_.isNumber(selectionStart)) {
+                    return selectionStart === valueLength;
+                } else if (document.selection) {
+                    range = document.selection.createRange();
+                    range.moveStart("character", -valueLength);
+                    return valueLength === range.text.length;
+                }
+                return true;
+            },
+            // Detaches listeners and drops element references, replacing
+            // them with a detached dummy so late calls stay harmless.
+            destroy: function destroy() {
+                this.$hint.off(".tt");
+                this.$input.off(".tt");
+                this.$overflowHelper.remove();
+                this.$hint = this.$input = this.$overflowHelper = $("<div>");
+            }
+        });
+        return Input;
+        // Hidden <pre> mirroring the input's font metrics, used to measure
+        // the rendered width of the input's text.
+        function buildOverflowHelper($input) {
+            return $('<pre aria-hidden="true"></pre>').css({
+                position: "absolute",
+                visibility: "hidden",
+                whiteSpace: "pre",
+                fontFamily: $input.css("font-family"),
+                fontSize: $input.css("font-size"),
+                fontStyle: $input.css("font-style"),
+                fontVariant: $input.css("font-variant"),
+                fontWeight: $input.css("font-weight"),
+                wordSpacing: $input.css("word-spacing"),
+                letterSpacing: $input.css("letter-spacing"),
+                textIndent: $input.css("text-indent"),
+                textRendering: $input.css("text-rendering"),
+                textTransform: $input.css("text-transform")
+            }).insertAfter($input);
+        }
+        function areQueriesEquivalent(a, b) {
+            return Input.normalizeQuery(a) === Input.normalizeQuery(b);
+        }
+        function withModifier($e) {
+            return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey;
+        }
+    }();
+    var Dataset = function() {
+        "use strict";
+        var keys, nameGenerator;
+        keys = {
+            val: "tt-selectable-display",
+            obj: "tt-selectable-object"
+        };
+        nameGenerator = _.getIdGenerator();
+        function Dataset(o, www) {
+            o = o || {};
+            o.templates = o.templates || {};
+            o.templates.notFound = o.templates.notFound || o.templates.empty;
+            if (!o.source) {
+                $.error("missing source");
+            }
+            if (!o.node) {
+                $.error("missing node");
+            }
+            if (o.name && !isValidName(o.name)) {
+                $.error("invalid dataset name: " + o.name);
+            }
+            www.mixin(this);
+            this.highlight = !!o.highlight;
+            this.name = o.name || nameGenerator();
+            this.limit = o.limit || 5;
+            this.displayFn = getDisplayFn(o.display || o.displayKey);
+            this.templates = getTemplates(o.templates, this.displayFn);
+            this.source = o.source.__ttAdapter ? o.source.__ttAdapter() : o.source;
+            this.async = _.isUndefined(o.async) ? this.source.length > 2 : !!o.async;
+            this._resetLastSuggestion();
+            this.$el = $(o.node).addClass(this.classes.dataset).addClass(this.classes.dataset + "-" + this.name);
+        }
+        Dataset.extractData = function extractData(el) {
+            var $el = $(el);
+            if ($el.data(keys.obj)) {
+                return {
+                    val: $el.data(keys.val) || "",
+                    obj: $el.data(keys.obj) || null
+                };
+            }
+            return null;
+        };
+        _.mixin(Dataset.prototype, EventEmitter, {
+            _overwrite: function overwrite(query, suggestions) {
+                suggestions = suggestions || [];
+                if (suggestions.length) {
+                    this._renderSuggestions(query, suggestions);
+                } else if (this.async && this.templates.pending) {
+                    this._renderPending(query);
+                } else if (!this.async && this.templates.notFound) {
+                    this._renderNotFound(query);
+                } else {
+                    this._empty();
+                }
+                this.trigger("rendered", this.name, suggestions, false);
+            },
+            _append: function append(query, suggestions) {
+                suggestions = suggestions || [];
+                if (suggestions.length && this.$lastSuggestion.length) {
+                    this._appendSuggestions(query, suggestions);
+                } else if (suggestions.length) {
+                    this._renderSuggestions(query, suggestions);
+                } else if (!this.$lastSuggestion.length && this.templates.notFound) {
+                    this._renderNotFound(query);
+                }
+                this.trigger("rendered", this.name, suggestions, true);
+            },
+            _renderSuggestions: function renderSuggestions(query, suggestions) {
+                var $fragment;
+                $fragment = this._getSuggestionsFragment(query, suggestions);
+                this.$lastSuggestion = $fragment.children().last();
+                this.$el.html($fragment).prepend(this._getHeader(query, suggestions)).append(this._getFooter(query, suggestions));
+            },
+            _appendSuggestions: function appendSuggestions(query, suggestions) {
+                var $fragment, $lastSuggestion;
+                $fragment = this._getSuggestionsFragment(query, suggestions);
+                $lastSuggestion = $fragment.children().last();
+                this.$lastSuggestion.after($fragment);
+                this.$lastSuggestion = $lastSuggestion;
+            },
+            _renderPending: function renderPending(query) {
+                var template = this.templates.pending;
+                this._resetLastSuggestion();
+                template && this.$el.html(template({
+                    query: query,
+                    dataset: this.name
+                }));
+            },
+            _renderNotFound: function renderNotFound(query) {
+                var template = this.templates.notFound;
+                this._resetLastSuggestion();
+                template && this.$el.html(template({
+                    query: query,
+                    dataset: this.name
+                }));
+            },
+            _empty: function empty() {
+                this.$el.empty();
+                this._resetLastSuggestion();
+            },
+            _getSuggestionsFragment: function getSuggestionsFragment(query, suggestions) {
+                var that = this, fragment;
+                fragment = document.createDocumentFragment();
+                _.each(suggestions, function getSuggestionNode(suggestion) {
+                    var $el, context;
+                    context = that._injectQuery(query, suggestion);
+                    $el = $(that.templates.suggestion(context)).data(keys.obj, suggestion).data(keys.val, that.displayFn(suggestion)).addClass(that.classes.suggestion + " " + that.classes.selectable);
+                    fragment.appendChild($el[0]);
+                });
+                this.highlight && highlight({
+                    className: this.classes.highlight,
+                    node: fragment,
+                    pattern: query
+                });
+                return $(fragment);
+            },
+            _getFooter: function getFooter(query, suggestions) {
+                return this.templates.footer ? this.templates.footer({
+                    query: query,
+                    suggestions: suggestions,
+                    dataset: this.name
+                }) : null;
+            },
+            _getHeader: function getHeader(query, suggestions) {
+                return this.templates.header ? this.templates.header({
+                    query: query,
+                    suggestions: suggestions,
+                    dataset: this.name
+                }) : null;
+            },
+            _resetLastSuggestion: function resetLastSuggestion() {
+                this.$lastSuggestion = $();
+            },
+            _injectQuery: function injectQuery(query, obj) {
+                return _.isObject(obj) ? _.mixin({
+                    _query: query
+                }, obj) : obj;
+            },
+            update: function update(query) {
+                var that = this, canceled = false, syncCalled = false, rendered = 0;
+                this.cancel();
+                this.cancel = function cancel() {
+                    canceled = true;
+                    that.cancel = $.noop;
+                    that.async && that.trigger("asyncCanceled", query);
+                };
+                this.source(query, sync, async);
+                !syncCalled && sync([]);
+                function sync(suggestions) {
+                    if (syncCalled) {
+                        return;
+                    }
+                    syncCalled = true;
+                    suggestions = (suggestions || []).slice(0, that.limit);
+                    rendered = suggestions.length;
+                    that._overwrite(query, suggestions);
+                    if (rendered < that.limit && that.async) {
+                        that.trigger("asyncRequested", query);
+                    }
+                }
+                function async(suggestions) {
+                    suggestions = suggestions || [];
+                    if (!canceled && rendered < that.limit) {
+                        that.cancel = $.noop;
+                        rendered += suggestions.length;
+                        that._append(query, suggestions.slice(0, that.limit - rendered));
+                        that.async && that.trigger("asyncReceived", query);
+                    }
+                }
+            },
+            cancel: $.noop,
+            clear: function clear() {
+                this._empty();
+                this.cancel();
+                this.trigger("cleared");
+            },
+            isEmpty: function isEmpty() {
+                return this.$el.is(":empty");
+            },
+            destroy: function destroy() {
+                this.$el = $("<div>");
+            }
+        });
+        return Dataset;
+        function getDisplayFn(display) {
+            display = display || _.stringify;
+            return _.isFunction(display) ? display : displayFn;
+            function displayFn(obj) {
+                return obj[display];
+            }
+        }
+        function getTemplates(templates, displayFn) {
+            return {
+                notFound: templates.notFound && _.templatify(templates.notFound),
+                pending: templates.pending && _.templatify(templates.pending),
+                header: templates.header && _.templatify(templates.header),
+                footer: templates.footer && _.templatify(templates.footer),
+                suggestion: templates.suggestion || suggestionTemplate
+            };
+            function suggestionTemplate(context) {
+                return $("<div>").text(displayFn(context));
+            }
+        }
+        function isValidName(str) {
+            return /^[_a-zA-Z0-9-]+$/.test(str);
+        }
+    }();
    var Menu = function() {
        "use strict";
        // Menu owns the dropdown node and the Datasets rendered inside it.
        // o: { node, datasets }; www: shared class-name/selector registry.
        function Menu(o, www) {
            var that = this;
            o = o || {};
            if (!o.node) {
                $.error("node is required");
            }
            www.mixin(this);
            this.$node = $(o.node);
            this.query = null;
            this.datasets = _.map(o.datasets, initializeDataset);
            // Resolve each dataset's node inside the menu, creating a <div>
            // when no matching child exists.
            function initializeDataset(oDataset) {
                var node = that.$node.find(oDataset.node).first();
                oDataset.node = node.length ? node : $("<div>").appendTo(that.$node);
                return new Dataset(oDataset, www);
            }
        }
        _.mixin(Menu.prototype, EventEmitter, {
            _onSelectableClick: function onSelectableClick($e) {
                this.trigger("selectableClicked", $($e.currentTarget));
            },
            // Keep the "empty" class in sync after every dataset render/clear,
            // then re-emit a menu-level event.
            _onRendered: function onRendered(type, dataset, suggestions, async) {
                this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty());
                this.trigger("datasetRendered", dataset, suggestions, async);
            },
            _onCleared: function onCleared() {
                this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty());
                this.trigger("datasetCleared");
            },
            // Forward dataset events (async*) unchanged to menu listeners.
            _propagate: function propagate() {
                this.trigger.apply(this, arguments);
            },
            _allDatasetsEmpty: function allDatasetsEmpty() {
                return _.every(this.datasets, isDatasetEmpty);
                function isDatasetEmpty(dataset) {
                    return dataset.isEmpty();
                }
            },
            _getSelectables: function getSelectables() {
                return this.$node.find(this.selectors.selectable);
            },
            _removeCursor: function _removeCursor() {
                var $selectable = this.getActiveSelectable();
                $selectable && $selectable.removeClass(this.classes.cursor);
            },
            // Scroll the menu node just enough to bring $el fully into view.
            _ensureVisible: function ensureVisible($el) {
                var elTop, elBottom, nodeScrollTop, nodeHeight;
                elTop = $el.position().top;
                elBottom = elTop + $el.outerHeight(true);
                nodeScrollTop = this.$node.scrollTop();
                nodeHeight = this.$node.height() + parseInt(this.$node.css("paddingTop"), 10) + parseInt(this.$node.css("paddingBottom"), 10);
                if (elTop < 0) {
                    this.$node.scrollTop(nodeScrollTop + elTop);
                } else if (nodeHeight < elBottom) {
                    this.$node.scrollTop(nodeScrollTop + (elBottom - nodeHeight));
                }
            },
            // Wire click delegation and subscribe to every dataset's events.
            bind: function() {
                var that = this, onSelectableClick;
                onSelectableClick = _.bind(this._onSelectableClick, this);
                this.$node.on("click.tt", this.selectors.selectable, onSelectableClick);
                _.each(this.datasets, function(dataset) {
                    dataset.onSync("asyncRequested", that._propagate, that).onSync("asyncCanceled", that._propagate, that).onSync("asyncReceived", that._propagate, that).onSync("rendered", that._onRendered, that).onSync("cleared", that._onCleared, that);
                });
                return this;
            },
            isOpen: function isOpen() {
                return this.$node.hasClass(this.classes.open);
            },
            open: function open() {
                this.$node.addClass(this.classes.open);
            },
            close: function close() {
                this.$node.removeClass(this.classes.open);
                this._removeCursor();
            },
            setLanguageDirection: function setLanguageDirection(dir) {
                this.$node.attr("dir", dir);
            },
            // Return the selectable `delta` steps from the current cursor,
            // treating "no cursor" as a virtual slot so stepping wraps through
            // it (null means the cursor leaves the list).
            selectableRelativeToCursor: function selectableRelativeToCursor(delta) {
                var $selectables, $oldCursor, oldIndex, newIndex;
                $oldCursor = this.getActiveSelectable();
                $selectables = this._getSelectables();
                oldIndex = $oldCursor ? $selectables.index($oldCursor) : -1;
                newIndex = oldIndex + delta;
                newIndex = (newIndex + 1) % ($selectables.length + 1) - 1;
                newIndex = newIndex < -1 ? $selectables.length - 1 : newIndex;
                return newIndex === -1 ? null : $selectables.eq(newIndex);
            },
            // Move the cursor class to $selectable (or clear it when null/empty).
            setCursor: function setCursor($selectable) {
                this._removeCursor();
                if ($selectable = $selectable && $selectable.first()) {
                    $selectable.addClass(this.classes.cursor);
                    this._ensureVisible($selectable);
                }
            },
            getSelectableData: function getSelectableData($el) {
                return $el && $el.length ? Dataset.extractData($el) : null;
            },
            getActiveSelectable: function getActiveSelectable() {
                var $selectable = this._getSelectables().filter(this.selectors.cursor).first();
                return $selectable.length ? $selectable : null;
            },
            getTopSelectable: function getTopSelectable() {
                var $selectable = this._getSelectables().first();
                return $selectable.length ? $selectable : null;
            },
            // Push a new query to every dataset; no-op when unchanged. Returns
            // whether an update actually happened.
            update: function update(query) {
                var isValidUpdate = query !== this.query;
                if (isValidUpdate) {
                    this.query = query;
                    _.each(this.datasets, updateDataset);
                }
                return isValidUpdate;
                function updateDataset(dataset) {
                    dataset.update(query);
                }
            },
            empty: function empty() {
                _.each(this.datasets, clearDataset);
                this.query = null;
                this.$node.addClass(this.classes.empty);
                function clearDataset(dataset) {
                    dataset.clear();
                }
            },
            destroy: function destroy() {
                this.$node.off(".tt");
                this.$node = $("<div>");
                _.each(this.datasets, destroyDataset);
                function destroyDataset(dataset) {
                    dataset.destroy();
                }
            }
        });
        return Menu;
    }();
    var DefaultMenu = function() {
        "use strict";
        // Keep a handle on the parent prototype for super-style calls.
        var s = Menu.prototype;
        // DefaultMenu extends Menu with show/hide behavior: the node is only
        // displayed while open AND at least one dataset has suggestions.
        function DefaultMenu() {
            Menu.apply(this, [].slice.call(arguments, 0));
        }
        _.mixin(DefaultMenu.prototype, Menu.prototype, {
            open: function open() {
                // Only reveal the node when there is something to show.
                !this._allDatasetsEmpty() && this._show();
                return s.open.apply(this, [].slice.call(arguments, 0));
            },
            close: function close() {
                this._hide();
                return s.close.apply(this, [].slice.call(arguments, 0));
            },
            // Re-evaluate visibility after every render/clear, then defer to
            // the parent implementation.
            _onRendered: function onRendered() {
                if (this._allDatasetsEmpty()) {
                    this._hide();
                } else {
                    this.isOpen() && this._show();
                }
                return s._onRendered.apply(this, [].slice.call(arguments, 0));
            },
            _onCleared: function onCleared() {
                if (this._allDatasetsEmpty()) {
                    this._hide();
                } else {
                    this.isOpen() && this._show();
                }
                return s._onCleared.apply(this, [].slice.call(arguments, 0));
            },
            setLanguageDirection: function setLanguageDirection(dir) {
                this.$node.css(dir === "ltr" ? this.css.ltr : this.css.rtl);
                return s.setLanguageDirection.apply(this, [].slice.call(arguments, 0));
            },
            _hide: function hide() {
                this.$node.hide();
            },
            _show: function show() {
                this.$node.css("display", "block");
            }
        });
        return DefaultMenu;
    }();
    var Typeahead = function() {
        "use strict";
        // Typeahead glues an Input and a Menu together over an EventBus.
        // o: { input, menu, eventBus, minLength? }; www: shared registry.
        function Typeahead(o, www) {
            var onFocused, onBlurred, onEnterKeyed, onTabKeyed, onEscKeyed, onUpKeyed, onDownKeyed, onLeftKeyed, onRightKeyed, onQueryChanged, onWhitespaceChanged;
            o = o || {};
            if (!o.input) {
                $.error("missing input");
            }
            if (!o.menu) {
                $.error("missing menu");
            }
            if (!o.eventBus) {
                $.error("missing event bus");
            }
            www.mixin(this);
            this.eventBus = o.eventBus;
            this.minLength = _.isNumber(o.minLength) ? o.minLength : 1;
            this.input = o.input;
            this.menu = o.menu;
            this.enabled = true;
            this.active = false;
            // Become active immediately if the input is already focused.
            this.input.hasFocus() && this.activate();
            this.dir = this.input.getLangDir();
            this._hacks();
            this.menu.bind().onSync("selectableClicked", this._onSelectableClicked, this).onSync("asyncRequested", this._onAsyncRequested, this).onSync("asyncCanceled", this._onAsyncCanceled, this).onSync("asyncReceived", this._onAsyncReceived, this).onSync("datasetRendered", this._onDatasetRendered, this).onSync("datasetCleared", this._onDatasetCleared, this);
            // c(...) chains predicate methods before the handler: the handler
            // only runs while every earlier method in the chain also ran.
            onFocused = c(this, "activate", "open", "_onFocused");
            onBlurred = c(this, "deactivate", "_onBlurred");
            onEnterKeyed = c(this, "isActive", "isOpen", "_onEnterKeyed");
            onTabKeyed = c(this, "isActive", "isOpen", "_onTabKeyed");
            onEscKeyed = c(this, "isActive", "_onEscKeyed");
            onUpKeyed = c(this, "isActive", "open", "_onUpKeyed");
            onDownKeyed = c(this, "isActive", "open", "_onDownKeyed");
            onLeftKeyed = c(this, "isActive", "isOpen", "_onLeftKeyed");
            onRightKeyed = c(this, "isActive", "isOpen", "_onRightKeyed");
            onQueryChanged = c(this, "_openIfActive", "_onQueryChanged");
            onWhitespaceChanged = c(this, "_openIfActive", "_onWhitespaceChanged");
            this.input.bind().onSync("focused", onFocused, this).onSync("blurred", onBlurred, this).onSync("enterKeyed", onEnterKeyed, this).onSync("tabKeyed", onTabKeyed, this).onSync("escKeyed", onEscKeyed, this).onSync("upKeyed", onUpKeyed, this).onSync("downKeyed", onDownKeyed, this).onSync("leftKeyed", onLeftKeyed, this).onSync("rightKeyed", onRightKeyed, this).onSync("queryChanged", onQueryChanged, this).onSync("whitespaceChanged", onWhitespaceChanged, this).onSync("langDirChanged", this._onLangDirChanged, this);
        }
        _.mixin(Typeahead.prototype, {
            // Browser workarounds: keep focus on the input when the menu is
            // interacted with (IE blur quirk) and swallow menu mousedowns.
            _hacks: function hacks() {
                var $input, $menu;
                $input = this.input.$input || $("<div>");
                $menu = this.menu.$node || $("<div>");
                $input.on("blur.tt", function($e) {
                    var active, isActive, hasActive;
                    active = document.activeElement;
                    isActive = $menu.is(active);
                    hasActive = $menu.has(active).length > 0;
                    if (_.isMsie() && (isActive || hasActive)) {
                        // IE moves focus to the menu on click; cancel the blur
                        // and restore focus to the input asynchronously.
                        $e.preventDefault();
                        $e.stopImmediatePropagation();
                        _.defer(function() {
                            $input.focus();
                        });
                    }
                });
                $menu.on("mousedown.tt", function($e) {
                    $e.preventDefault();
                });
            },
            _onSelectableClicked: function onSelectableClicked(type, $el) {
                this.select($el);
            },
            _onDatasetCleared: function onDatasetCleared() {
                this._updateHint();
            },
            _onDatasetRendered: function onDatasetRendered(type, dataset, suggestions, async) {
                this._updateHint();
                this.eventBus.trigger("render", suggestions, async, dataset);
            },
            _onAsyncRequested: function onAsyncRequested(type, dataset, query) {
                this.eventBus.trigger("asyncrequest", query, dataset);
            },
            _onAsyncCanceled: function onAsyncCanceled(type, dataset, query) {
                this.eventBus.trigger("asynccancel", query, dataset);
            },
            _onAsyncReceived: function onAsyncReceived(type, dataset, query) {
                this.eventBus.trigger("asyncreceive", query, dataset);
            },
            _onFocused: function onFocused() {
                this._minLengthMet() && this.menu.update(this.input.getQuery());
            },
            _onBlurred: function onBlurred() {
                if (this.input.hasQueryChangedSinceLastFocus()) {
                    this.eventBus.trigger("change", this.input.getQuery());
                }
            },
            _onEnterKeyed: function onEnterKeyed(type, $e) {
                var $selectable;
                if ($selectable = this.menu.getActiveSelectable()) {
                    this.select($selectable) && $e.preventDefault();
                }
            },
            // Tab selects the cursored suggestion, else autocompletes to the
            // top one; either way the default tab behavior is suppressed only
            // when something actually happened.
            _onTabKeyed: function onTabKeyed(type, $e) {
                var $selectable;
                if ($selectable = this.menu.getActiveSelectable()) {
                    this.select($selectable) && $e.preventDefault();
                } else if ($selectable = this.menu.getTopSelectable()) {
                    this.autocomplete($selectable) && $e.preventDefault();
                }
            },
            _onEscKeyed: function onEscKeyed() {
                this.close();
            },
            _onUpKeyed: function onUpKeyed() {
                this.moveCursor(-1);
            },
            _onDownKeyed: function onDownKeyed() {
                this.moveCursor(+1);
            },
            // In RTL, pressing left at the end of the input accepts the hint.
            _onLeftKeyed: function onLeftKeyed() {
                if (this.dir === "rtl" && this.input.isCursorAtEnd()) {
                    this.autocomplete(this.menu.getTopSelectable());
                }
            },
            // In LTR, pressing right at the end of the input accepts the hint.
            _onRightKeyed: function onRightKeyed() {
                if (this.dir === "ltr" && this.input.isCursorAtEnd()) {
                    this.autocomplete(this.menu.getTopSelectable());
                }
            },
            _onQueryChanged: function onQueryChanged(e, query) {
                this._minLengthMet(query) ? this.menu.update(query) : this.menu.empty();
            },
            _onWhitespaceChanged: function onWhitespaceChanged() {
                this._updateHint();
            },
            _onLangDirChanged: function onLangDirChanged(e, dir) {
                if (this.dir !== dir) {
                    this.dir = dir;
                    this.menu.setLanguageDirection(dir);
                }
            },
            _openIfActive: function openIfActive() {
                this.isActive() && this.open();
            },
            _minLengthMet: function minLengthMet(query) {
                query = _.isString(query) ? query : this.input.getQuery() || "";
                return query.length >= this.minLength;
            },
            // Show the top suggestion's tail as an inline hint when the typed
            // value is a case-insensitive prefix of it and nothing overflows.
            _updateHint: function updateHint() {
                var $selectable, data, val, query, escapedQuery, frontMatchRegEx, match;
                $selectable = this.menu.getTopSelectable();
                data = this.menu.getSelectableData($selectable);
                val = this.input.getInputValue();
                if (data && !_.isBlankString(val) && !this.input.hasOverflow()) {
                    query = Input.normalizeQuery(val);
                    escapedQuery = _.escapeRegExChars(query);
                    // Capture the part of the suggestion after the query.
                    frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i");
                    match = frontMatchRegEx.exec(data.val);
                    match && this.input.setHint(val + match[1]);
                } else {
                    this.input.clearHint();
                }
            },
            isEnabled: function isEnabled() {
                return this.enabled;
            },
            enable: function enable() {
                this.enabled = true;
            },
            disable: function disable() {
                this.enabled = false;
            },
            isActive: function isActive() {
                return this.active;
            },
            // Transition to active unless disabled or vetoed by a "before
            // active" listener; returns whether the instance is active.
            activate: function activate() {
                if (this.isActive()) {
                    return true;
                } else if (!this.isEnabled() || this.eventBus.before("active")) {
                    return false;
                } else {
                    this.active = true;
                    this.eventBus.trigger("active");
                    return true;
                }
            },
            deactivate: function deactivate() {
                if (!this.isActive()) {
                    return true;
                } else if (this.eventBus.before("idle")) {
                    return false;
                } else {
                    this.active = false;
                    this.close();
                    this.eventBus.trigger("idle");
                    return true;
                }
            },
            isOpen: function isOpen() {
                return this.menu.isOpen();
            },
            open: function open() {
                if (!this.isOpen() && !this.eventBus.before("open")) {
                    this.menu.open();
                    this._updateHint();
                    this.eventBus.trigger("open");
                }
                return this.isOpen();
            },
            close: function close() {
                if (this.isOpen() && !this.eventBus.before("close")) {
                    this.menu.close();
                    this.input.clearHint();
                    this.input.resetInputValue();
                    this.eventBus.trigger("close");
                }
                return !this.isOpen();
            },
            setVal: function setVal(val) {
                this.input.setQuery(_.toStr(val));
            },
            getVal: function getVal() {
                return this.input.getQuery();
            },
            // Commit a suggestion: set the query, fire "select", close the
            // menu. Returns false when vetoed or no data is attached.
            select: function select($selectable) {
                var data = this.menu.getSelectableData($selectable);
                if (data && !this.eventBus.before("select", data.obj)) {
                    this.input.setQuery(data.val, true);
                    this.eventBus.trigger("select", data.obj);
                    this.close();
                    return true;
                }
                return false;
            },
            // Fill the input with a suggestion's value without closing; no-op
            // when the query already equals the suggestion value.
            autocomplete: function autocomplete($selectable) {
                var query, data, isValid;
                query = this.input.getQuery();
                data = this.menu.getSelectableData($selectable);
                isValid = data && query !== data.val;
                if (isValid && !this.eventBus.before("autocomplete", data.obj)) {
                    this.input.setQuery(data.val);
                    this.eventBus.trigger("autocomplete", data.obj);
                    return true;
                }
                return false;
            },
            // Move the cursor by delta; a stale menu (query changed since last
            // render) is refreshed instead, canceling the move.
            moveCursor: function moveCursor(delta) {
                var query, $candidate, data, payload, cancelMove;
                query = this.input.getQuery();
                $candidate = this.menu.selectableRelativeToCursor(delta);
                data = this.menu.getSelectableData($candidate);
                payload = data ? data.obj : null;
                cancelMove = this._minLengthMet() && this.menu.update(query);
                if (!cancelMove && !this.eventBus.before("cursorchange", payload)) {
                    this.menu.setCursor($candidate);
                    if (data) {
                        this.input.setInputValue(data.val);
                    } else {
                        this.input.resetInputValue();
                        this._updateHint();
                    }
                    this.eventBus.trigger("cursorchange", payload);
                    return true;
                }
                return false;
            },
            destroy: function destroy() {
                this.input.destroy();
                this.menu.destroy();
            }
        });
        return Typeahead;
        // Build a handler that invokes the named methods on ctx in order,
        // forwarding the handler's arguments to each.
        function c(ctx) {
            var methods = [].slice.call(arguments, 1);
            return function() {
                var args = [].slice.call(arguments);
                _.each(methods, function(method) {
                    return ctx[method].apply(ctx, args);
                });
            };
        }
    }();
+    (function() {
+        "use strict";
+        var old, keys, methods;
+        old = $.fn.typeahead;
+        keys = {
+            www: "tt-www",
+            attrs: "tt-attrs",
+            typeahead: "tt-typeahead"
+        };
+        methods = {
+            initialize: function initialize(o, datasets) {
+                var www;
+                datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1);
+                o = o || {};
+                www = WWW(o.classNames);
+                return this.each(attach);
+                function attach() {
+                    var $input, $wrapper, $hint, $menu, defaultHint, defaultMenu, eventBus, input, menu, typeahead, MenuConstructor;
+                    _.each(datasets, function(d) {
+                        d.highlight = !!o.highlight;
+                    });
+                    $input = $(this);
+                    $wrapper = $(www.html.wrapper);
+                    $hint = $elOrNull(o.hint);
+                    $menu = $elOrNull(o.menu);
+                    defaultHint = o.hint !== false && !$hint;
+                    defaultMenu = o.menu !== false && !$menu;
+                    defaultHint && ($hint = buildHintFromInput($input, www));
+                    defaultMenu && ($menu = $(www.html.menu).css(www.css.menu));
+                    $hint && $hint.val("");
+                    $input = prepInput($input, www);
+                    if (defaultHint || defaultMenu) {
+                        $wrapper.css(www.css.wrapper);
+                        $input.css(defaultHint ? www.css.input : www.css.inputWithNoHint);
+                        $input.wrap($wrapper).parent().prepend(defaultHint ? $hint : null).append(defaultMenu ? $menu : null);
+                    }
+                    MenuConstructor = defaultMenu ? DefaultMenu : Menu;
+                    eventBus = new EventBus({
+                        el: $input
+                    });
+                    input = new Input({
+                        hint: $hint,
+                        input: $input
+                    }, www);
+                    menu = new MenuConstructor({
+                        node: $menu,
+                        datasets: datasets
+                    }, www);
+                    typeahead = new Typeahead({
+                        input: input,
+                        menu: menu,
+                        eventBus: eventBus,
+                        minLength: o.minLength
+                    }, www);
+                    $input.data(keys.www, www);
+                    $input.data(keys.typeahead, typeahead);
+                }
+            },
+            isEnabled: function isEnabled() {
+                var enabled;
+                ttEach(this.first(), function(t) {
+                    enabled = t.isEnabled();
+                });
+                return enabled;
+            },
+            enable: function enable() {
+                ttEach(this, function(t) {
+                    t.enable();
+                });
+                return this;
+            },
+            disable: function disable() {
+                ttEach(this, function(t) {
+                    t.disable();
+                });
+                return this;
+            },
+            isActive: function isActive() {
+                var active;
+                ttEach(this.first(), function(t) {
+                    active = t.isActive();
+                });
+                return active;
+            },
+            activate: function activate() {
+                ttEach(this, function(t) {
+                    t.activate();
+                });
+                return this;
+            },
+            deactivate: function deactivate() {
+                ttEach(this, function(t) {
+                    t.deactivate();
+                });
+                return this;
+            },
+            isOpen: function isOpen() {
+                var open;
+                ttEach(this.first(), function(t) {
+                    open = t.isOpen();
+                });
+                return open;
+            },
+            open: function open() {
+                ttEach(this, function(t) {
+                    t.open();
+                });
+                return this;
+            },
+            close: function close() {
+                ttEach(this, function(t) {
+                    t.close();
+                });
+                return this;
+            },
+            select: function select(el) {
+                var success = false, $el = $(el);
+                ttEach(this.first(), function(t) {
+                    success = t.select($el);
+                });
+                return success;
+            },
+            autocomplete: function autocomplete(el) {
+                var success = false, $el = $(el);
+                ttEach(this.first(), function(t) {
+                    success = t.autocomplete($el);
+                });
+                return success;
+            },
+            moveCursor: function moveCursor(delta) {
+                var success = false;
+                ttEach(this.first(), function(t) {
+                    success = t.moveCursor(delta);
+                });
+                return success;
+            },
+            val: function val(newVal) {
+                var query;
+                if (!arguments.length) {
+                    ttEach(this.first(), function(t) {
+                        query = t.getVal();
+                    });
+                    return query;
+                } else {
+                    ttEach(this, function(t) {
+                        t.setVal(newVal);
+                    });
+                    return this;
+                }
+            },
+            destroy: function destroy() {
+                ttEach(this, function(typeahead, $input) {
+                    revert($input);
+                    typeahead.destroy();
+                });
+                return this;
+            }
+        };
+        $.fn.typeahead = function(method) {
+            if (methods[method]) {
+                return methods[method].apply(this, [].slice.call(arguments, 1));
+            } else {
+                return methods.initialize.apply(this, arguments);
+            }
+        };
+        $.fn.typeahead.noConflict = function noConflict() {
+            $.fn.typeahead = old;
+            return this;
+        };
+        function ttEach($els, fn) {
+            $els.each(function() {
+                var $input = $(this), typeahead;
+                (typeahead = $input.data(keys.typeahead)) && fn(typeahead, $input);
+            });
+        }
+        function buildHintFromInput($input, www) {
+            return $input.clone().addClass(www.classes.hint).removeData().css(www.css.hint).css(getBackgroundStyles($input)).prop("readonly", true).removeAttr("id name placeholder required").attr({
+                autocomplete: "off",
+                spellcheck: "false",
+                tabindex: -1
+            });
+        }
+        function prepInput($input, www) {
+            $input.data(keys.attrs, {
+                dir: $input.attr("dir"),
+                autocomplete: $input.attr("autocomplete"),
+                spellcheck: $input.attr("spellcheck"),
+                style: $input.attr("style")
+            });
+            $input.addClass(www.classes.input).attr({
+                autocomplete: "off",
+                spellcheck: false
+            });
+            try {
+                !$input.attr("dir") && $input.attr("dir", "auto");
+            } catch (e) {}
+            return $input;
+        }
+        function getBackgroundStyles($el) {
+            return {
+                backgroundAttachment: $el.css("background-attachment"),
+                backgroundClip: $el.css("background-clip"),
+                backgroundColor: $el.css("background-color"),
+                backgroundImage: $el.css("background-image"),
+                backgroundOrigin: $el.css("background-origin"),
+                backgroundPosition: $el.css("background-position"),
+                backgroundRepeat: $el.css("background-repeat"),
+                backgroundSize: $el.css("background-size")
+            };
+        }
+        function revert($input) {
+            var www, $wrapper;
+            www = $input.data(keys.www);
+            $wrapper = $input.parent().filter(www.selectors.wrapper);
+            _.each($input.data(keys.attrs), function(val, key) {
+                _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val);
+            });
+            $input.removeData(keys.typeahead).removeData(keys.www).removeData(keys.attrs).removeClass(www.classes.input);
+            if ($wrapper.length) {
+                $input.detach().insertAfter($wrapper);
+                $wrapper.remove();
+            }
+        }
+        function $elOrNull(obj) {
+            var isValid, $el;
+            isValid = _.isJQuery(obj) || _.isElement(obj);
+            $el = isValid ? $(obj).first() : [];
+            return $el.length ? $el : null;
+        }
+    })();
+});
\ No newline at end of file
diff --git a/dconv/parser/__init__.py b/dconv/parser/__init__.py
new file mode 100644 (file)
index 0000000..82b8522
--- /dev/null
@@ -0,0 +1,81 @@
+__all__ = [
+    'arguments',
+    'example',
+    'keyword',
+    'seealso',
+    'table',
+    'underline'
+]
+
+
+class Parser:
+    def __init__(self, pctxt):
+        self.pctxt = pctxt
+
+    def parse(self, line):
+        return line
+
+class PContext:
+    def __init__(self, templates = None):
+        self.set_content_list([])
+        self.templates = templates
+
+    def set_content(self, content):
+        self.set_content_list(content.split("\n"))
+
+    def set_content_list(self, content):
+        self.lines = content
+        self.nblines = len(self.lines)
+        self.i = 0
+        self.stop = False
+
+    def get_lines(self):
+        return self.lines
+
+    def eat_lines(self):
+        count = 0
+        while self.has_more_lines() and self.lines[self.i].strip():
+            count += 1
+            self.next()
+        return count
+
+    def eat_empty_lines(self):
+        count = 0
+        while self.has_more_lines() and not self.lines[self.i].strip():
+            count += 1
+            self.next()
+        return count
+
+    def next(self, count=1):
+        self.i += count
+
+    def has_more_lines(self, offset=0):
+        return self.i + offset < self.nblines
+
+    def get_line(self, offset=0):
+        return self.lines[self.i + offset].rstrip()
+
+
+# Get the indentation of a line
+def get_indent(line):
+        indent = 0
+        length = len(line)
+        while indent < length and line[indent] == ' ':
+            indent += 1
+        return indent
+
+
+# Remove unneeded indentation
+def remove_indent(lines):
+    # Detect the minimum indentation in the list
+    min_indent = -1
+    for line in lines:
+        if not line.strip():
+            continue
+        indent = get_indent(line)
+        if min_indent < 0 or indent < min_indent:
+            min_indent = indent
+    # Realign the list content to remove the minimum indentation
+    if min_indent > 0:
+        for index, line in enumerate(lines):
+            lines[index] = line[min_indent:]
diff --git a/dconv/parser/arguments.py b/dconv/parser/arguments.py
new file mode 100644 (file)
index 0000000..096b269
--- /dev/null
@@ -0,0 +1,132 @@
+import sys
+import re
+import parser
+
+'''
+TODO: Allow inner data parsing (this will allow to parse the examples provided in an arguments block)
+'''
+class Parser(parser.Parser):
+    def __init__(self, pctxt):
+        parser.Parser.__init__(self, pctxt)
+        #template = pctxt.templates.get_template("parser/arguments.tpl")
+        #self.replace = template.render().strip()
+
+    def parse(self, line):
+        #return re.sub(r'(Arguments *:)', self.replace, line)
+        pctxt = self.pctxt
+
+        result = re.search(r'(Arguments? *:)', line)
+        if result:
+            label = result.group(0)
+            content = []
+
+            desc_indent = False
+            desc = re.sub(r'.*Arguments? *:', '', line).strip()
+
+            indent = parser.get_indent(line)
+
+            pctxt.next()
+            pctxt.eat_empty_lines()
+
+            arglines = []
+            if desc != "none":
+                add_empty_lines = 0
+                while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) > indent):
+                    for j in range(0, add_empty_lines):
+                        arglines.append("")
+                    arglines.append(pctxt.get_line())
+                    pctxt.next()
+                    add_empty_lines = pctxt.eat_empty_lines()
+                    '''
+                    print line
+
+                    if parser.get_indent(line) == arg_indent:
+                        argument = re.sub(r' *([^ ]+).*', r'\1', line)
+                        if argument:
+                            #content.append("<b>%s</b>" % argument)
+                            arg_desc = [line.replace(argument, " " * len(self.unescape(argument)), 1)]
+                            #arg_desc = re.sub(r'( *)([^ ]+)(.*)', r'\1<b>\2</b>\3', line)
+                            arg_desc_indent = parser.get_indent(arg_desc[0])
+                            arg_desc[0] = arg_desc[0][arg_indent:]
+                            pctxt.next()
+                            add_empty_lines = 0
+                            while pctxt.has_more_lines and parser.get_indent(pctxt.get_line()) >= arg_indent:
+                                for i in range(0, add_empty_lines):
+                                    arg_desc.append("")
+                                arg_desc.append(pctxt.get_line()[arg_indent:])
+                                pctxt.next()
+                                add_empty_lines = pctxt.eat_empty_lines()
+                            # TODO : reduce space at the beginnning
+                            content.append({
+                                'name': argument,
+                                'desc': arg_desc
+                            })
+                    '''
+
+                if arglines:
+                    new_arglines = []
+                    #content = self.parse_args(arglines)
+                    parser.remove_indent(arglines)
+                    '''
+                    pctxt2 = parser.PContext(pctxt.templates)
+                    pctxt2.set_content_list(arglines)
+                    while pctxt2.has_more_lines():
+                        new_arglines.append(parser.example.Parser(pctxt2).parse(pctxt2.get_line()))
+                        pctxt2.next()
+                    arglines = new_arglines
+                    '''
+
+            pctxt.stop = True
+
+            template = pctxt.templates.get_template("parser/arguments.tpl")
+            return template.render(
+                pctxt=pctxt,
+                label=label,
+                desc=desc,
+                content=arglines
+                #content=content
+            )
+            return line
+
+        return line
+
+'''
+    def parse_args(self, data):
+        args = []
+
+        pctxt = parser.PContext()
+        pctxt.set_content_list(data)
+
+        while pctxt.has_more_lines():
+            line = pctxt.get_line()
+            arg_indent = parser.get_indent(line)
+            argument = re.sub(r' *([^ ]+).*', r'\1', line)
+            if True or argument:
+                arg_desc = []
+                trailing_desc = line.replace(argument, " " * len(self.unescape(argument)), 1)[arg_indent:]
+                if trailing_desc.strip():
+                    arg_desc.append(trailing_desc)
+                pctxt.next()
+                add_empty_lines = 0
+                while pctxt.has_more_lines() and parser.get_indent(pctxt.get_line()) > arg_indent:
+                    for i in range(0, add_empty_lines):
+                        arg_desc.append("")
+                    arg_desc.append(pctxt.get_line()[arg_indent:])
+                    pctxt.next()
+                    add_empty_lines = pctxt.eat_empty_lines()
+
+                parser.remove_indent(arg_desc)
+
+                args.append({
+                    'name': argument,
+                    'desc': arg_desc
+                })
+        return args
+
+    def unescape(self, s):
+        s = s.replace("&lt;", "<")
+        s = s.replace("&gt;", ">")
+        # this has to be last:
+        s = s.replace("&amp;", "&")
+        return s
+'''
diff --git a/dconv/parser/example.py b/dconv/parser/example.py
new file mode 100644 (file)
index 0000000..3958992
--- /dev/null
@@ -0,0 +1,77 @@
+import re
+import parser
+
+# Detect examples blocks
+class Parser(parser.Parser):
+    def __init__(self, pctxt):
+        parser.Parser.__init__(self, pctxt)
+        template = pctxt.templates.get_template("parser/example/comment.tpl")
+        self.comment = template.render(pctxt=pctxt).strip()
+
+
+    def parse(self, line):
+        pctxt = self.pctxt
+
+        result = re.search(r'^ *(Examples? *:)(.*)', line)
+        if result:
+            label = result.group(1)
+
+            desc_indent = False
+            desc = result.group(2).strip()
+
+            # Some examples have a description
+            if desc:
+                desc_indent = len(line) - len(desc)
+
+            indent = parser.get_indent(line)
+
+            if desc:
+                # And some description are on multiple lines
+                while pctxt.get_line(1) and parser.get_indent(pctxt.get_line(1)) == desc_indent:
+                    desc += " " + pctxt.get_line(1).strip()
+                    pctxt.next()
+
+            pctxt.next()
+            add_empty_line = pctxt.eat_empty_lines()
+
+            content = []
+
+            if parser.get_indent(pctxt.get_line()) > indent:
+                if desc:
+                    desc = desc[0].upper() + desc[1:]
+                add_empty_line = 0
+                while pctxt.has_more_lines() and ((not pctxt.get_line()) or (parser.get_indent(pctxt.get_line()) > indent)):
+                    if pctxt.get_line():
+                        for j in range(0, add_empty_line):
+                            content.append("")
+
+                        content.append(re.sub(r'(#.*)$', self.comment, pctxt.get_line()))
+                        add_empty_line = 0
+                    else:
+                        add_empty_line += 1
+                    pctxt.next()
+            elif parser.get_indent(pctxt.get_line()) == indent:
+                # Simple example that can't have empty lines
+                if add_empty_line and desc:
+                    # This means that the example was on the same line as the 'Example' tag
+                    # and was not a description
+                    content.append(" " * indent + desc)
+                    desc = False
+                else:
+                    while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) >= indent):
+                        content.append(pctxt.get_line())
+                        pctxt.next()
+                    pctxt.eat_empty_lines() # Skip empty remaining lines
+
+            pctxt.stop = True
+
+            parser.remove_indent(content)
+
+            template = pctxt.templates.get_template("parser/example.tpl")
+            return template.render(
+                pctxt=pctxt,
+                label=label,
+                desc=desc,
+                content=content
+            )
+        return line
diff --git a/dconv/parser/keyword.py b/dconv/parser/keyword.py
new file mode 100644 (file)
index 0000000..f20944f
--- /dev/null
@@ -0,0 +1,142 @@
+import re
+import parser
+from urllib.parse import quote
+
+class Parser(parser.Parser):
+    def __init__(self, pctxt):
+        parser.Parser.__init__(self, pctxt)
+        self.keywordPattern = re.compile(r'^(%s%s)(%s)' % (
+            r'([a-z][a-z0-9\-\+_\.]*[a-z0-9\-\+_)])', # keyword
+            r'( [a-z0-9\-_]+)*',                  # subkeywords
+            r'(\([^ ]*\))?',   # arg (ex: (<backend>), (<frontend>/<backend>), (<offset1>,<length>[,<offset2>]) ...
+        ))
+
+    def parse(self, line):
+        pctxt = self.pctxt
+        keywords = pctxt.keywords
+        keywordsCount = pctxt.keywordsCount
+        chapters = pctxt.chapters
+
+        res = ""
+
+        if line != "" and not re.match(r'^ ', line):
+            parsed = self.keywordPattern.match(line)
+            if parsed != None:
+                keyword = parsed.group(1)
+                arg     = parsed.group(4)
+                parameters = line[len(keyword) + len(arg):]
+                if (parameters != "" and not re.match(r"^ +((&lt;|\[|\{|/).*|(: [a-z +]+))?(\(deprecated\))?$", parameters)):
+                    # Dirty hack
+                    # - parameters should only start with the character "<", "[", "{", "/"
+                    # - or a colon (":") followed by an alpha keyword to identify fetching samples (optionally separated by the character "+")
+                    # - or the string "(deprecated)" at the end
+                    keyword = False
+                else:
+                    splitKeyword = keyword.split(" ")
+
+                parameters = arg + parameters
+            else:
+                keyword = False
+
+            if keyword and (len(splitKeyword) <= 5):
+                toplevel = pctxt.details["toplevel"]
+                for j in range(0, len(splitKeyword)):
+                    subKeyword = " ".join(splitKeyword[0:j + 1])
+                    if subKeyword != "no":
+                        if not subKeyword in keywords:
+                            keywords[subKeyword] = set()
+                        keywords[subKeyword].add(pctxt.details["chapter"])
+                    res += '<a class="anchor" name="%s"></a>' % subKeyword
+                    res += '<a class="anchor" name="%s-%s"></a>' % (toplevel, subKeyword)
+                    res += '<a class="anchor" name="%s-%s"></a>' % (pctxt.details["chapter"], subKeyword)
+                    res += '<a class="anchor" name="%s (%s)"></a>' % (subKeyword, chapters[toplevel]['title'])
+                    res += '<a class="anchor" name="%s (%s)"></a>' % (subKeyword, chapters[pctxt.details["chapter"]]['title'])
+
+                deprecated = parameters.find("(deprecated)")
+                if deprecated != -1:
+                    prefix = ""
+                    suffix = ""
+                    parameters = parameters.replace("(deprecated)", '<span class="label label-warning">(deprecated)</span>')
+                else:
+                    prefix = ""
+                    suffix = ""
+
+                nextline = pctxt.get_line(1)
+
+                while nextline.startswith("   "):
+                    # Found parameters on the next line
+                    parameters += "\n" + nextline
+                    pctxt.next()
+                    if pctxt.has_more_lines(1):
+                        nextline = pctxt.get_line(1)
+                    else:
+                        nextline = ""
+
+
+                parameters = self.colorize(parameters)
+                res += '<div class="keyword">%s<b><a class="anchor" name="%s"></a><a href="#%s">%s</a></b>%s%s</div>' % (prefix, keyword, quote("%s-%s" % (pctxt.details["chapter"], keyword)), keyword, parameters, suffix)
+                pctxt.next()
+                pctxt.stop = True
+            elif line.startswith("/*"):
+                # Skip comments in the documentation
+                while not pctxt.get_line().endswith("*/"):
+                    pctxt.next()
+                pctxt.next()
+            else:
+                # This is probably not a keyword but a text, ignore it
+                res += line
+        else:
+            res += line
+
+        return res
+
+    # Used to colorize keywords parameters
+    # TODO : use CSS styling
+    def colorize(self, text):
+        colorized = ""
+        tags = [
+                [ "["   , "]"   , "#008" ],
+                [ "{"   , "}"   , "#800" ],
+                [ "&lt;", "&gt;", "#080" ],
+        ]
+        heap = []
+        pos = 0
+        while pos < len(text):
+            substring = text[pos:]
+            found = False
+            for tag in tags:
+                if substring.startswith(tag[0]):
+                    # Opening tag
+                    heap.append(tag)
+                    colorized += '<span style="color: %s">%s' % (tag[2], substring[0:len(tag[0])])
+                    pos += len(tag[0])
+                    found = True
+                    break
+                elif substring.startswith(tag[1]):
+                    # Closing tag
+
+                    # pop opening tags until the corresponding one is found
+                    openingTag = False
+                    while heap and openingTag != tag:
+                        openingTag = heap.pop()
+                        if openingTag != tag:
+                            colorized += '</span>'
+                    # all intermediate tags are now closed, we can display the tag
+                    colorized += substring[0:len(tag[1])]
+                    # and the close it if it was previously opened
+                    if openingTag == tag:
+                        colorized += '</span>'
+                    pos += len(tag[1])
+                    found = True
+                    break
+            if not found:
+                colorized += substring[0]
+                pos += 1
+        # close all unterminated tags
+        while heap:
+            tag = heap.pop()
+            colorized += '</span>'
+
+        return colorized
+
+
diff --git a/dconv/parser/seealso.py b/dconv/parser/seealso.py
new file mode 100644 (file)
index 0000000..bbb53f9
--- /dev/null
@@ -0,0 +1,32 @@
+import re
+import parser
+
+class Parser(parser.Parser):
+    def parse(self, line):
+        pctxt = self.pctxt
+
+        result = re.search(r'(See also *:)', line)
+        if result:
+            label = result.group(0)
+
+            desc = re.sub(r'.*See also *:', '', line).strip()
+
+            indent = parser.get_indent(line)
+
+            # Some descriptions are on multiple lines
+            while pctxt.has_more_lines(1) and parser.get_indent(pctxt.get_line(1)) >= indent:
+                desc += " " + pctxt.get_line(1).strip()
+                pctxt.next()
+
+            pctxt.eat_empty_lines()
+            pctxt.next()
+            pctxt.stop = True
+
+            template = pctxt.templates.get_template("parser/seealso.tpl")
+            return template.render(
+                pctxt=pctxt,
+                label=label,
+                desc=desc,
+            )
+
+        return line
diff --git a/dconv/parser/table.py b/dconv/parser/table.py
new file mode 100644 (file)
index 0000000..e2575b1
--- /dev/null
@@ -0,0 +1,244 @@
+import re
+import sys
+import parser
+
+class Parser(parser.Parser):
+    def __init__(self, pctxt):
+        parser.Parser.__init__(self, pctxt)
+        self.table1Pattern = re.compile(r'^ *(-+\+)+-+')
+        self.table2Pattern = re.compile(r'^ *\+(-+\+)+')
+
+    def parse(self, line):
+        # NOTE(review): removed stale no-op "global" declaration — none of those names are assigned or read in this method
+
+        pctxt = self.pctxt
+
+        if pctxt.context['headers']['subtitle'] != 'Configuration Manual':
+            # Quick exit
+            return line
+        elif pctxt.details['chapter'] == "4":
+            # BUG: the matrix in chapter 4. Proxies is not well displayed, we skip this chapter
+            return line
+
+        if pctxt.has_more_lines(1):
+            nextline = pctxt.get_line(1)
+        else:
+            nextline = ""
+
+        if self.table1Pattern.match(nextline):
+            # activate table rendering only for the Configuration Manual
+            lineSeparator = nextline
+            nbColumns = nextline.count("+") + 1
+            extraColumns = 0
+            print("Entering table mode (%d columns)" % nbColumns, file=sys.stderr)
+            table = []
+            if line.find("|") != -1:
+                row = []
+                while pctxt.has_more_lines():
+                    line = pctxt.get_line()
+                    if pctxt.has_more_lines(1):
+                        nextline = pctxt.get_line(1)
+                    else:
+                        nextline = ""
+                    if line == lineSeparator:
+                        # New row
+                        table.append(row)
+                        row = []
+                        if nextline.find("|") == -1:
+                            break # End of table
+                    else:
+                        # Data
+                        columns = line.split("|")
+                        for j in range(0, len(columns)):
+                            try:
+                                if row[j]:
+                                    row[j] += "<br />"
+                                row[j] += columns[j].strip()
+                            except IndexError:
+                                row.append(columns[j].strip())
+                    pctxt.next()
+            else:
+                row = []
+                headers = nextline
+                while pctxt.has_more_lines():
+                    line = pctxt.get_line()
+                    if pctxt.has_more_lines(1):
+                        nextline = pctxt.get_line(1)
+                    else:
+                        nextline = ""
+
+                    if nextline == "":
+                        if row: table.append(row)
+                        break # End of table
+
+                    if (line != lineSeparator) and (line[0] != "-"):
+                        start = 0
+
+                        if row and not line.startswith(" "):
+                            # Row is complete, parse a new one
+                            table.append(row)
+                            row = []
+
+                        tmprow = []
+                        while start != -1:
+                            end = headers.find("+", start)
+                            if end == -1:
+                                end = len(headers)
+
+                            realend = end
+                            if realend == len(headers):
+                                realend = len(line)
+                            else:
+                                while realend < len(line) and line[realend] != " ":
+                                    realend += 1
+                                    end += 1
+
+                            tmprow.append(line[start:realend])
+
+                            start = end + 1
+                            if start >= len(headers):
+                                start = -1
+                        for j in range(0, nbColumns):
+                            try:
+                                row[j] += tmprow[j].strip()
+                            except IndexError:
+                                row.append(tmprow[j].strip())
+
+                        deprecated = row[0].endswith("(deprecated)")
+                        if deprecated:
+                            row[0] = row[0][: -len("(deprecated)")].rstrip()
+
+                        nooption = row[1].startswith("(*)")
+                        if nooption:
+                            row[1] = row[1][len("(*)"):].strip()
+
+                        if deprecated or nooption:
+                            extraColumns = 1
+                            extra = ""
+                            if deprecated:
+                                extra += '<span class="label label-warning">(deprecated)</span>'
+                            if nooption:
+                                extra += '<span>(*)</span>'
+                            row.append(extra)
+
+                    pctxt.next()
+            print("Leaving table mode", file=sys.stderr)
+            pctxt.next() # skip useless next line
+            pctxt.stop = True
+
+            return self.renderTable(table, nbColumns, pctxt.details["toplevel"])
+        # elif self.table2Pattern.match(line):
+        #    return self.parse_table_format2()
+        elif line.find("May be used in sections") != -1:
+            nextline = pctxt.get_line(1)
+            rows = []
+            headers = line.split(":")
+            rows.append(headers[1].split("|"))
+            rows.append(nextline.split("|"))
+            table = {
+                    "rows": rows,
+                    "title": headers[0]
+            }
+            pctxt.next(2)  # skip this previous table
+            pctxt.stop = True
+
+            return self.renderTable(table)
+
+        return line
+
+
+    def parse_table_format2(self):
+        pctxt = self.pctxt
+
+        linesep = pctxt.get_line()
+        rows = []
+
+        pctxt.next()
+        maxcols = 0
+        while pctxt.get_line().strip().startswith("|"):
+            row = pctxt.get_line().strip()[1:-1].split("|")
+            rows.append(row)
+            maxcols = max(maxcols, len(row))
+            pctxt.next()
+            if pctxt.get_line() == linesep:
+                # TODO : find a way to define a special style for next row
+                pctxt.next()
+        pctxt.stop = True
+
+        return self.renderTable(rows, maxcols)
+
+    # Render tables detected by the conversion parser
+    def renderTable(self, table, maxColumns = 0, toplevel = None):
+        pctxt  = self.pctxt
+        template = pctxt.templates.get_template("parser/table.tpl")
+
+        res = ""
+
+        title = None
+        if isinstance(table, dict):
+            title = table["title"]
+            table = table["rows"]
+
+        if not maxColumns:
+            maxColumns = len(table[0])
+
+        rows = []
+
+        mode = "th"
+        headerLine = ""
+        hasKeywords = False
+        i = 0
+        for row in table:
+            line = ""
+
+            if i == 0:
+                row_template = pctxt.templates.get_template("parser/table/header.tpl")
+            else:
+                row_template = pctxt.templates.get_template("parser/table/row.tpl")
+
+            if i > 1 and (i  - 1) % 20 == 0 and len(table) > 50:
+                # Repeat headers periodically for long tables
+                rows.append(headerLine)
+
+            j = 0
+            cols = []
+            for column in row:
+                if j >= maxColumns:
+                    break
+
+                tplcol = {}
+
+                data = column.strip()
+                keyword = column
+                if j == 0 and i == 0 and keyword == 'keyword':
+                    hasKeywords = True
+                if j == 0 and i != 0 and hasKeywords:
+                    if keyword.startswith("[no] "):
+                        keyword = keyword[len("[no] "):]
+                    tplcol['toplevel'] = toplevel
+                    tplcol['keyword'] = keyword
+                tplcol['extra'] = []
+                if j == 0 and len(row) > maxColumns:
+                    for k in range(maxColumns, len(row)):
+                        tplcol['extra'].append(row[k])
+                tplcol['data'] = data
+                cols.append(tplcol)
+                j += 1
+            mode = "td"
+
+            line = row_template.render(
+                pctxt=pctxt,
+                columns=cols
+            ).strip()
+            if i == 0:
+                headerLine = line
+
+            rows.append(line)
+
+            i += 1
+
+        return template.render(
+            pctxt=pctxt,
+            title=title,
+            rows=rows,
+        )
diff --git a/dconv/parser/underline.py b/dconv/parser/underline.py
new file mode 100644 (file)
index 0000000..3a2350c
--- /dev/null
@@ -0,0 +1,16 @@
+import parser
+
+class Parser(parser.Parser):
+    # Detect underlines
+    def parse(self, line):
+        pctxt = self.pctxt
+        if pctxt.has_more_lines(1):
+            nextline = pctxt.get_line(1)
+            if (len(line) > 0) and (len(nextline) > 0) and (nextline[0] == '-') and ("-" * len(line) == nextline):
+                template = pctxt.templates.get_template("parser/underline.tpl")
+                line = template.render(pctxt=pctxt, data=line).strip()
+                pctxt.next(2)
+                pctxt.eat_empty_lines()
+                pctxt.stop = True
+
+        return line
diff --git a/dconv/templates/parser/arguments.tpl b/dconv/templates/parser/arguments.tpl
new file mode 100644 (file)
index 0000000..b5f91e9
--- /dev/null
@@ -0,0 +1,9 @@
+<div class="separator">
+<span class="label label-info">${label}</span>\
+% if desc:
+ ${desc}
+% endif
+% if content:
+<pre class="prettyprint arguments">${"\n".join(content)}</pre>
+% endif
+</div>
diff --git a/dconv/templates/parser/example.tpl b/dconv/templates/parser/example.tpl
new file mode 100644 (file)
index 0000000..184b6dd
--- /dev/null
@@ -0,0 +1,12 @@
+<div class="separator">
+<span class="label label-success">${label}</span>
+<pre class="prettyprint">
+% if desc:
+<div class="example-desc">${desc}</div>\
+% endif
+<code>\
+% for line in content:
+${line}
+% endfor
+</code></pre>
+</div>
\ No newline at end of file
diff --git a/dconv/templates/parser/example/comment.tpl b/dconv/templates/parser/example/comment.tpl
new file mode 100644 (file)
index 0000000..b51ec2d
--- /dev/null
@@ -0,0 +1 @@
+<span class="comment">\1</span>
\ No newline at end of file
diff --git a/dconv/templates/parser/seealso.tpl b/dconv/templates/parser/seealso.tpl
new file mode 100644 (file)
index 0000000..72cf5f9
--- /dev/null
@@ -0,0 +1 @@
+<div class="page-header"><b>${label}</b> ${desc}</div>
diff --git a/dconv/templates/parser/table.tpl b/dconv/templates/parser/table.tpl
new file mode 100644 (file)
index 0000000..0119176
--- /dev/null
@@ -0,0 +1,11 @@
+% if title:
+<div><p>${title} :</p>\
+% endif
+<table class="table table-bordered" border="0" cellspacing="0" cellpadding="0">
+% for row in rows:
+${row}
+% endfor
+</table>\
+% if title:
+</div>
+% endif
\ No newline at end of file
diff --git a/dconv/templates/parser/table/header.tpl b/dconv/templates/parser/table/header.tpl
new file mode 100644 (file)
index 0000000..e84b47f
--- /dev/null
@@ -0,0 +1,6 @@
+<thead><tr>\
+% for col in columns:
+<% data = col['data'] %>\
+<th>${data}</th>\
+% endfor
+</tr></thead>
diff --git a/dconv/templates/parser/table/row.tpl b/dconv/templates/parser/table/row.tpl
new file mode 100644 (file)
index 0000000..e4f2bef
--- /dev/null
@@ -0,0 +1,36 @@
+<% from urllib.parse import quote %>
+<% base = pctxt.context['base'] %>
+<tr>\
+% for col in columns:
+<% data = col['data'] %>\
+<%
+    if data in ['yes']:
+        style = "class=\"alert-success pagination-centered\""
+        data = 'yes<br /><img src="%scss/check.png" alt="yes" title="yes" />' % base
+    elif data in ['no']:
+        style = "class=\"alert-error pagination-centered\""
+        data = 'no<br /><img src="%scss/cross.png" alt="no" title="no" />' % base
+    elif data in ['X']:
+        style = "class=\"pagination-centered\""
+        data = '<img src="%scss/check.png" alt="X" title="yes" />' % base
+    elif data in ['-']:
+        style = "class=\"pagination-centered\""
+        data = '&nbsp;'
+    elif data in ['*']:
+        style = "class=\"pagination-centered\""
+    else:
+        style = None
+%>\
+<td ${style}>\
+% if "keyword" in col:
+<a href="#${quote("%s-%s" % (col['toplevel'], col['keyword']))}">\
+% for extra in col['extra']:
+<span class="pull-right">${extra}</span>\
+% endfor
+${data}</a>\
+% else:
+${data}\
+% endif
+</td>\
+% endfor
+</tr>
diff --git a/dconv/templates/parser/underline.tpl b/dconv/templates/parser/underline.tpl
new file mode 100644 (file)
index 0000000..4f35f7e
--- /dev/null
@@ -0,0 +1 @@
+<h5>${data}</h5>
diff --git a/dconv/templates/summary.html b/dconv/templates/summary.html
new file mode 100644 (file)
index 0000000..87c6414
--- /dev/null
@@ -0,0 +1,43 @@
+<a class="anchor" id="summary" name="summary"></a>
+<div class="page-header">
+       <h1 id="chapter-summary" data-target="summary">Summary</h1>
+</div>
+<div class="row">
+       <div class="col-md-6">
+               <% previousLevel = None %>
+               % for k in chapterIndexes:
+                       <% chapter = chapters[k] %>
+                       % if chapter['title']:
+                               <%
+                                       if chapter['level'] == 1:
+                                               otag = "<b>"
+                                               etag = "</b>"
+                                       else:
+                                               otag = etag = ""
+                               %>
+                               % if chapter['chapter'] == '7':
+					## Quick and dirty hack to split the summary into 2 columns
+					## TODO : implement a generic way to split the summary
+                                       </div><div class="col-md-6">
+                                       <% previousLevel = None %>
+                               % endif
+                               % if otag and previousLevel:
+                                       <br />
+                               % endif
+                               <div class="row">
+                                       <div class="col-md-2 pagination-right noheight">${otag}<small>${chapter['chapter']}.</small>${etag}</div>
+                                       <div class="col-md-10 noheight">
+                                               % for tab in range(1, chapter['level']):
+                                                       <div class="tab">
+                                               % endfor
+                                                       <a href="#${chapter['chapter']}">${otag}${chapter['title']}${etag}</a>
+                                               % for tab in range(1, chapter['level']):
+                                                       </div>
+                                               % endfor
+                                       </div>
+                               </div>
+                               <% previousLevel = chapter['level'] %>
+                       % endif
+               % endfor
+       </div>
+</div>
diff --git a/dconv/templates/template.html b/dconv/templates/template.html
new file mode 100644 (file)
index 0000000..c72b355
--- /dev/null
@@ -0,0 +1,238 @@
+<!DOCTYPE html>
+<html lang="en">
+       <head>
+               <meta charset="utf-8" />
+               <title>${headers['title']} ${headers['version']} - ${headers['subtitle']}</title>
+               <link href="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet" />
+               <link href="${base}css/page.css?${version}" rel="stylesheet" />
+       </head>
+       <body>
+               <nav class="navbar navbar-default navbar-fixed-top" role="navigation">
+                       <div class="navbar-header">
+                               <button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#menu">
+                                       <span class="sr-only">Toggle navigation</span>
+                                       <span class="icon-bar"></span>
+                                       <span class="icon-bar"></span>
+                                       <span class="icon-bar"></span>
+                               </button>
+                               <a class="navbar-brand" href="${base}index.html">${headers['title']} <small>${headers['subtitle']}</small></a>
+                       </div>
+                       <!-- /.navbar-header -->
+
+                       <!-- Collect the nav links, forms, and other content for toggling -->
+                       <div class="collapse navbar-collapse" id="menu">
+                               <ul class="nav navbar-nav">
+                                       <li><a href="http://www.haproxy.org/">HAProxy home page</a></li>
+                                       <li class="dropdown">
+                                               <a href="#" class="dropdown-toggle" data-toggle="dropdown">Versions <b class="caret"></b></a>
+                                               <ul class="dropdown-menu">
+                                                       ## TODO : provide a structure to dynamically generate per version links
+                                                       <li class="dropdown-header">HAProxy 1.4</li>
+                                                       <li><a href="${base}configuration-1.4.html">Configuration Manual <small>(stable)</small></a></li>
+                                                       <li><a href="${base}snapshot/configuration-1.4.html">Configuration Manual <small>(snapshot)</small></a></li>
+                                                       <li><a href="http://git.1wt.eu/git/haproxy-1.4.git/">GIT Repository</a></li>
+                                                       <li><a href="http://www.haproxy.org/git/?p=haproxy-1.4.git">Browse repository</a></li>
+                                                       <li><a href="http://www.haproxy.org/download/1.4/">Browse directory</a></li>
+                                                       <li class="divider"></li>
+                                                       <li class="dropdown-header">HAProxy 1.5</li>
+                                                       <li><a href="${base}configuration-1.5.html">Configuration Manual <small>(stable)</small></a></li>
+                                                       <li><a href="${base}snapshot/configuration-1.5.html">Configuration Manual <small>(snapshot)</small></a></li>
+                                                       <li><a href="http://git.1wt.eu/git/haproxy-1.5.git/">GIT Repository</a></li>
+                                                       <li><a href="http://www.haproxy.org/git/?p=haproxy-1.5.git">Browse repository</a></li>
+                                                       <li><a href="http://www.haproxy.org/download/1.5/">Browse directory</a></li>
+                            <li class="divider"></li>
+                            <li class="dropdown-header">HAProxy 1.6</li>
+                            <li><a href="${base}configuration-1.6.html">Configuration Manual <small>(stable)</small></a></li>
+                            <li><a href="${base}snapshot/configuration-1.6.html">Configuration Manual <small>(snapshot)</small></a></li>
+                            <li><a href="${base}intro-1.6.html">Starter Guide <small>(stable)</small></a></li>
+                            <li><a href="${base}snapshot/intro-1.6.html">Starter Guide <small>(snapshot)</small></a></li>
+                            <li><a href="http://git.1wt.eu/git/haproxy.git/">GIT Repository</a></li>
+                            <li><a href="http://www.haproxy.org/git/?p=haproxy.git">Browse repository</a></li>
+                            <li><a href="http://www.haproxy.org/download/1.6/">Browse directory</a></li>
+                                               </ul>
+                                       </li>
+                               </ul>
+                       </div>
+               </nav>
+               <!-- /.navbar-static-side -->
+
+               <div id="wrapper">
+
+                       <div id="sidebar">
+                               <form onsubmit="search(this.keyword.value); return false" role="form">
+                                       <div id="searchKeyword" class="form-group">
+                                               <input type="text" class="form-control typeahead" id="keyword" name="keyword" placeholder="Search..." autocomplete="off">
+                                       </div>
+                               </form>
+                               <p>
+                                       Keyboard navigation&nbsp;: <span id="keyboardNavStatus"></span>
+                               </p>
+                               <p>
+                                       When enabled, you can use <strong>left</strong> and <strong>right</strong> arrow keys to navigate between chapters.<br>
+                                       The feature is automatically disabled when the search field is focused.
+                               </p>
+                               <p class="text-right">
+                                       <small>Converted with <a href="https://github.com/cbonte/haproxy-dconv">haproxy-dconv</a> v<b>${version}</b> on <b>${date}</b></small>
+                               </p>
+                       </div>
+                       <!-- /.sidebar -->
+
+                       <div id="page-wrapper">
+                               <div class="row">
+                                       <div class="col-lg-12">
+                                               <div class="text-center">
+                                                       <h1>${headers['title']}</h1>
+                                                       <h2>${headers['subtitle']}</h2>
+                                                       <p><strong>${headers['version']}</strong></p>
+                                                       <p>
+                                                               <a href="http://www.haproxy.org/" title="HAProxy Home Page"><img src="${base}img/logo-med.png" /></a><br>
+                                                               ${headers['author']}<br>
+                                                               ${headers['date']}
+                                                       </p>
+                                               </div>
+
+                                               ${document}
+                                               <br>
+                                               <hr>
+                                               <div class="text-right">
+                                                       ${headers['title']} ${headers['version'].replace("version ", "")} &ndash; ${headers['subtitle']}<br>
+                                                       <small>${headers['date']}, ${headers['author']}</small>
+                                               </div>
+                                       </div>
+                                       <!-- /.col-lg-12 -->
+                               </div>
+                               <!-- /.row -->
+                               <div style="position: fixed; z-index: 1000; bottom: 0; left: 0; right: 0; padding: 10px">
+                                       <ul class="pager" style="margin: 0">
+                                               <li class="previous"><a id="previous" href="#"></a></li>
+                                               <li class="next"><a id="next" href="#"></a></li>
+                                       </ul>
+                               </div>
+                       </div>
+                       <!-- /#page-wrapper -->
+
+               </div>
+               <!-- /#wrapper -->
+
+               <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
+               <script src="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/js/bootstrap.min.js"></script>
+               <script src="//cdnjs.cloudflare.com/ajax/libs/typeahead.js/0.11.1/typeahead.bundle.min.js"></script>
+               <script>
+                       /* Keyword search */
+                       var searchFocus = false
+                       var keywords = [
+                               "${'",\n\t\t\t\t"'.join(keywords)}"
+                       ]
+
+                       function updateKeyboardNavStatus() {
+                               var status = searchFocus ? '<span class="label label-disabled">Disabled</span>' : '<span class="label label-success">Enabled</span>'
+                               $('#keyboardNavStatus').html(status)
+                       }
+
+                       function search(keyword) {
+                               if (keyword && !!~$.inArray(keyword, keywords)) {
+                                       window.location.hash = keyword
+                               }
+                       }
+                       // constructs the suggestion engine
+                       var kwbh = new Bloodhound({
+                               datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
+                               queryTokenizer: Bloodhound.tokenizers.whitespace,
+                               local: $.map(keywords, function(keyword) { return { value: keyword }; })
+                       });
+                       kwbh.initialize()
+
+                       $('#searchKeyword .typeahead').typeahead({
+                               hint: true,
+                               highlight: true,
+                               minLength: 1,
+                               autoselect: true
+                       },
+                       {
+                               name: 'keywords',
+                               displayKey: 'value',
+                               limit: keywords.length,
+                               source: kwbh.ttAdapter()
+                       }).focus(function() {
+                               searchFocus = true
+                               updateKeyboardNavStatus()
+                       }).blur(function() {
+                               searchFocus = false
+                               updateKeyboardNavStatus()
+                       }).bind('typeahead:selected', function ($e, datum) {
+                               search(datum.value)
+                       })
+
+                       /* EXPERIMENTAL - Previous/Next navigation */
+                       var headings = $(":header")
+                       var previousTarget = false
+                       var nextTarget = false
+                       var $previous = $('#previous')
+                       var $next = $('#next')
+                       function refreshNavigation() {
+                               var previous = false
+                               var next = false
+                               $.each(headings, function(item, value) {
+                                       var el = $(value)
+
+                                       // TODO : avoid target recalculation on each refresh
+                                       var target = el.attr('data-target')
+                                       if (! target) return true
+
+                                       var target_el = $('#' + target.replace(/\./, "\\."))
+                                       if (! target_el.attr('id')) return true
+
+                                       if (target_el.offset().top < $(window).scrollTop()) {
+                                               previous = el
+                                       }
+                                       if (target_el.offset().top - 1 > $(window).scrollTop()) {
+                                               next = el
+                                       }
+                                       if (next) return false
+                               })
+
+                               previousTarget = previous ? previous.attr('data-target') : 'top'
+                               $previous.html(
+                                       previous && previousTarget ?
+                                               '<span class="glyphicon glyphicon-arrow-left"></span> ' + previous.text() :
+                                               '<span class="glyphicon glyphicon-arrow-up"></span> Top'
+                               ).attr('href', '#' + previousTarget)
+
+                               nextTarget = next ? next.attr('data-target') : 'bottom'
+                               $next.html(
+                                       next && nextTarget ?
+                                               next.text() + ' <span class="glyphicon glyphicon-arrow-right"></span>' :
+                                               'Bottom <span class="glyphicon glyphicon-arrow-down"></span>'
+                               ).attr('href', '#' + nextTarget)
+                       }
+
+                       $(window).scroll(function () {
+                               refreshNavigation()
+                       });
+                       $(document).ready(function() {
+                               refreshNavigation()
+                               updateKeyboardNavStatus()
+                       });
+
+                       /* EXPERIMENTAL - Enable keyboard navigation */
+                       $(document).keydown(function(e){
+                               if (searchFocus) return
+
+                               switch(e.which) {
+                                       case 37: // left
+                                       window.location.hash = previousTarget ? previousTarget : 'top'
+                                       break
+
+                                       case 39: // right
+                                       window.location.hash = nextTarget ? nextTarget : 'bottom'
+                                       break
+
+                                       default: return // exit this handler for other keys
+                               }
+                               e.preventDefault()
+                       })
+               </script>
+               ${footer}
+               <a class="anchor" name="bottom"></a>
+       </body>
+</html>
diff --git a/dconv/tools/generate-docs.sh b/dconv/tools/generate-docs.sh
new file mode 100755 (executable)
index 0000000..36fdf1b
--- /dev/null
@@ -0,0 +1,177 @@
+#!/bin/bash
+
+PROJECT_HOME=$(dirname $(readlink -f $0))
+cd $PROJECT_HOME || exit 1
+
+WORK_DIR=$PROJECT_HOME/work
+
+function on_exit()
+{
+       echo "-- END $(date)"
+}
+
+function init()
+{
+       trap on_exit EXIT
+
+       echo
+       echo "-- START $(date)"
+       echo "PROJECT_HOME = $PROJECT_HOME"
+
+       echo "Preparing work directories..."
+       mkdir -p $WORK_DIR || exit 1
+       mkdir -p $WORK_DIR/haproxy || exit 1
+       mkdir -p $WORK_DIR/haproxy-dconv || exit 1
+
+       UPDATED=0
+       PUSH=0
+
+}
+
+# Needed as "git -C" is only available since git 1.8.5
+function git-C()
+{
+       _gitpath=$1
+       shift
+       echo "git --git-dir=$_gitpath/.git --work-tree=$_gitpath $@" >&2
+       git --git-dir=$_gitpath/.git --work-tree=$_gitpath "$@"
+}
+
+function fetch_haproxy_dconv()
+{
+       echo "Fetching latest haproxy-dconv public version..."
+       if [ ! -e $WORK_DIR/haproxy-dconv/master ];
+       then
+               git clone -v git://github.com/cbonte/haproxy-dconv.git $WORK_DIR/haproxy-dconv/master || exit 1
+       fi
+       GIT="git-C $WORK_DIR/haproxy-dconv/master"
+
+       OLD_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)"
+       $GIT checkout master && $GIT pull -v
+       version=$($GIT describe --tags)
+       version=${version%-g*}
+       NEW_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)"
+       if [ "$OLD_MD5" != "$NEW_MD5" ];
+       then
+               UPDATED=1
+       fi
+
+       echo "Fetching last haproxy-dconv public pages version..."
+       if [ ! -e $WORK_DIR/haproxy-dconv/gh-pages ];
+       then
+               cp -a $WORK_DIR/haproxy-dconv/master $WORK_DIR/haproxy-dconv/gh-pages || exit 1
+       fi
+       GIT="git-C $WORK_DIR/haproxy-dconv/gh-pages"
+
+       $GIT checkout gh-pages && $GIT pull -v
+}
+
+function fetch_haproxy()
+{
+       url=$1
+       path=$2
+
+       echo "Fetching HAProxy 1.4 repository..."
+       if [ ! -e $path ];
+       then
+               git clone -v $url $path || exit 1
+       fi
+       GIT="git-C $path"
+
+       $GIT checkout master && $GIT pull -v
+}
+
+function _generate_file()
+{
+    infile=$1
+       destfile=$2
+       git_version=$3
+       state=$4
+
+       $GIT checkout $git_version
+
+       if [ -e $gitpath/doc/$infile ];
+       then
+
+               git_version_simple=${git_version%-g*}
+               doc_version=$(tail -n1 $destfile 2>/dev/null | grep " git:" | sed 's/.* git:\([^ ]*\).*/\1/')
+               if [ $UPDATED -eq 1 -o "$git_version" != "$doc_version" ];
+               then
+                       HTAG="VERSION-$(basename $gitpath | sed 's/[.]/\\&/g')"
+                       if [ "$state" == "snapshot" ];
+                       then
+                               base=".."
+                               HTAG="$HTAG-SNAPSHOT"
+                       else
+                               base="."
+                       fi
+
+
+                       $WORK_DIR/haproxy-dconv/master/haproxy-dconv.py -i $gitpath/doc/$infile -o $destfile --base=$base &&
+                       echo "<!-- git:$git_version -->" >> $destfile &&
+                       sed -i "s/\(<\!-- $HTAG -->\)\(.*\)\(<\!-- \/$HTAG -->\)/\1${git_version_simple}\3/" $docroot/index.html
+
+               else
+                       echo "Already up to date."
+               fi
+
+               if [ "$doc_version" != "" -a "$git_version" != "$doc_version" ];
+               then
+                       changelog=$($GIT log --oneline $doc_version..$git_version $gitpath/doc/$infile)
+               else
+                       changelog=""
+               fi
+
+               GITDOC="git-C $docroot"
+               if [ "$($GITDOC status -s $destfile)" != "" ];
+               then
+                       $GITDOC add $destfile &&
+                       $GITDOC commit -m "Updating HAProxy $state $infile ${git_version_simple} generated by haproxy-dconv $version" -m "$changelog" $destfile $docroot/index.html &&
+                       PUSH=1
+               fi
+       fi
+}
+
+function generate_docs()
+{
+       url=$1
+       gitpath=$2
+       docroot=$3
+    infile=$4
+       outfile=$5
+
+       fetch_haproxy $url $gitpath
+
+       GIT="git-C $gitpath"
+
+       $GIT checkout master
+       git_version=$($GIT describe --tags --match 'v*')
+       git_version_stable=${git_version%-*-g*}
+
+       echo "Generating snapshot version $git_version..."
+       _generate_file $infile $docroot/snapshot/$outfile $git_version snapshot
+
+       echo "Generating stable version $git_version..."
+       _generate_file $infile $docroot/$outfile $git_version_stable stable
+}
+
+function push()
+{
+       docroot=$1
+       GITDOC="git-C $docroot"
+
+       if [ $PUSH -eq 1 ];
+       then
+               $GITDOC push origin gh-pages
+       fi
+
+}
+
+
+init
+fetch_haproxy_dconv
+generate_docs http://git.1wt.eu/git/haproxy-1.4.git/ $WORK_DIR/haproxy/1.4 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.4.html
+generate_docs http://git.1wt.eu/git/haproxy-1.5.git/ $WORK_DIR/haproxy/1.5 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.5.html
+generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.6.html
+generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages intro.txt intro-1.6.html
+push $WORK_DIR/haproxy-dconv/gh-pages
diff --git a/gbp.conf b/gbp.conf
new file mode 100644 (file)
index 0000000..b608587
--- /dev/null
+++ b/gbp.conf
@@ -0,0 +1,4 @@
+[DEFAULT]
+pristine-tar = True
+upstream-branch = upstream-2.0
+debian-branch = master
diff --git a/halog.1 b/halog.1
new file mode 100644 (file)
index 0000000..f5dd19f
--- /dev/null
+++ b/halog.1
@@ -0,0 +1,108 @@
+.TH HALOG "1" "July 2013" "halog" "User Commands"
+.SH NAME
+halog \- HAProxy log statistics reporter
+.SH SYNOPSIS
+.B halog
+[\fI-h|--help\fR]
+.br
+.B halog
+[\fIoptions\fR] <LOGFILE
+.SH DESCRIPTION
+.B halog
+reads HAProxy log data from stdin and extracts and displays lines matching
+user-specified criteria.
+.SH OPTIONS
+.SS Input filters \fR(several filters may be combined)
+.TP
+\fB\-H\fR
+Only match lines containing HTTP logs (ignore TCP)
+.TP
+\fB\-E\fR
+Only match lines without any error (no 5xx status)
+.TP
+\fB\-e\fR
+Only match lines with errors (status 5xx or negative)
+.TP
+\fB\-rt\fR|\fB\-RT\fR <time>
+Only match response times larger|smaller than <time>
+.TP
+\fB\-Q\fR|\fB\-QS\fR
+Only match queued requests (any queue|server queue)
+.TP
+\fB\-tcn\fR|\fB\-TCN\fR <code>
+Only match requests with/without termination code <code>
+.TP
+\fB\-hs\fR|\fB\-HS\fR <[min][:][max]>
+Only match requests with HTTP status codes within/not within min..max. Any of
+them may be omitted. Exact code is checked for if no ':' is specified.
+.SS
+Modifiers
+.TP
+\fB\-v\fR
+Invert the input filtering condition
+.TP
+\fB\-q\fR
+Don't report errors/warnings
+.TP
+\fB\-m\fR <lines>
+Limit output to the first <lines> lines
+.SS
+Output filters \fR\- only one may be used at a time
+.TP
+\fB\-c\fR
+Only report the number of lines that would have been printed
+.TP
+\fB\-pct\fR
+Output connect and response times percentiles
+.TP
+\fB\-st\fR
+Output number of requests per HTTP status code
+.TP
+\fB\-cc\fR
+Output number of requests per cookie code (2 chars)
+.TP
+\fB\-tc\fR
+Output number of requests per termination code (2 chars)
+.TP
+\fB\-srv\fR
+Output statistics per server (time, requests, errors)
+.TP
+\fB\-u\fR*
+Output statistics per URL (time, requests, errors)
+.br
+Additional characters indicate the output sorting key:
+.RS
+.TP
+\fB\-u\fR
+URL
+.TP
+\fB\-uc\fR
+Request count
+.TP
+\fB\-ue\fR
+Error count
+.TP
+\fB\-ua\fR
+Average response time
+.TP
+\fB\-ut\fR
+Average total time
+.TP
+\fB\-uao\fR, \fB\-uto\fR
+Average times computed on valid ('OK') requests
+.TP
+\fB\-uba\fR
+Average bytes returned
+.TP
+\fB\-ubt\fR
+Total bytes returned
+.RE
+.SH "SEE ALSO"
+.BR haproxy (1)
+.SH AUTHOR
+.PP
+\fBhalog\fR was written by Willy Tarreau <w@1wt.eu> and is part of \fBhaproxy\fR(1).
+.PP
+This manual page was written by Apollon Oikonomopoulos <apoikos@gmail.com> for the Debian project (but may
+be used by others).
+
diff --git a/haproxy-doc.doc-base.haproxy b/haproxy-doc.doc-base.haproxy
new file mode 100644 (file)
index 0000000..9d9a967
--- /dev/null
@@ -0,0 +1,9 @@
+Document: haproxy-doc
+Title: HAProxy Documentation
+Author: Willy Tarreau
+Abstract: This documentation covers the configuration of HAProxy.
+Section: System/Administration
+
+Format: HTML
+Index: /usr/share/doc/haproxy/html/configuration.html
+Files: /usr/share/doc/haproxy/html/*.html
diff --git a/haproxy-doc.doc-base.haproxy-lua b/haproxy-doc.doc-base.haproxy-lua
new file mode 100644 (file)
index 0000000..8234d78
--- /dev/null
@@ -0,0 +1,9 @@
+Document: haproxy-lua-api
+Title: HAProxy Lua API Documentation
+Author: Thierry FOURNIER
+Abstract: This documentation covers HAProxy's Lua API
+Section: System/Administration
+
+Format: HTML
+Index: /usr/share/doc/haproxy/lua/index.html
+Files: /usr/share/doc/haproxy/lua/*.html
diff --git a/haproxy-doc.docs b/haproxy-doc.docs
new file mode 100644 (file)
index 0000000..569c21a
--- /dev/null
@@ -0,0 +1 @@
+debian/dconv/NOTICE
diff --git a/haproxy-doc.install b/haproxy-doc.install
new file mode 100644 (file)
index 0000000..e0a347e
--- /dev/null
@@ -0,0 +1,7 @@
+doc/configuration.html usr/share/doc/haproxy/html/
+doc/intro.html usr/share/doc/haproxy/html/
+doc/management.html usr/share/doc/haproxy/html/
+doc/lua-api/_build/html/* usr/share/doc/haproxy/lua/
+debian/dconv/css/* usr/share/doc/haproxy/html/css/
+debian/dconv/js/* usr/share/doc/haproxy/html/js/
+debian/dconv/img/* usr/share/doc/haproxy/html/img/
diff --git a/haproxy-doc.links b/haproxy-doc.links
new file mode 100644 (file)
index 0000000..93738f6
--- /dev/null
@@ -0,0 +1,6 @@
+usr/share/javascript/bootstrap/css/bootstrap.min.css usr/share/doc/haproxy/html/css/bootstrap.min.css
+usr/share/javascript/bootstrap/js/bootstrap.min.js usr/share/doc/haproxy/html/js/bootstrap.min.js
+usr/share/javascript/bootstrap/fonts usr/share/doc/haproxy/html/fonts
+usr/share/javascript/jquery/jquery.min.js usr/share/doc/haproxy/html/js/jquery.min.js
+usr/share/doc/haproxy/html /usr/share/doc/haproxy-doc/html
+usr/share/doc/haproxy/lua /usr/share/doc/haproxy-doc/lua
diff --git a/haproxy-doc.maintscript b/haproxy-doc.maintscript
new file mode 100644 (file)
index 0000000..2f54652
--- /dev/null
@@ -0,0 +1,2 @@
+dir_to_symlink /usr/share/doc/haproxy-doc/html /usr/share/doc/haproxy/html 1.6.3-2~
+dir_to_symlink /usr/share/doc/haproxy-doc/lua /usr/share/doc/haproxy/lua 1.6.3-2~
diff --git a/haproxy.README.Debian b/haproxy.README.Debian
new file mode 100644 (file)
index 0000000..6e3e3ab
--- /dev/null
@@ -0,0 +1,29 @@
+Syslog support
+--------------
+Upstream recommends using syslog over UDP to log from HAProxy processes, as
+this allows seamless logging from chroot'ed processes without access to
+/dev/log. However, many syslog implementations do not enable UDP syslog by
+default.
+
+The default HAProxy configuration in Debian uses /dev/log for logging and
+ships an rsyslog snippet that creates /dev/log in HAProxy's chroot and logs all
+HAProxy messages to /var/log/haproxy.log. To take advantage of this, you must
+restart rsyslog after installing this package. For other syslog daemons you
+will have to take manual measures to enable UDP logging or create /dev/log
+under HAProxy's chroot:
+a. For sysklogd, add SYSLOG="-a /var/lib/haproxy/dev/log" to
+   /etc/default/syslog.
+b. For inetutils-syslogd, add SYSLOGD_OPTS="-a /var/lib/haproxy/dev/log" to
+   /etc/default/inetutils-syslogd.
+
+Prometheus exporter
+-------------------
+HAProxy is shipped with a built-in Prometheus exporter. To enable it,
+you need to configure the Prometheus endpoint:
+
+    frontend stats
+        bind *:8404
+        http-request use-service prometheus-exporter if { path /metrics }
+        stats enable
+        stats uri /stats
+        stats refresh 10s
diff --git a/haproxy.cfg b/haproxy.cfg
new file mode 100644 (file)
index 0000000..a6f8a9c
--- /dev/null
@@ -0,0 +1,34 @@
+global
+       log /dev/log    local0
+       log /dev/log    local1 notice
+       chroot /var/lib/haproxy
+       stats socket /run/haproxy/admin.sock mode 660 level admin expose-fd listeners
+       stats timeout 30s
+       user haproxy
+       group haproxy
+       daemon
+
+       # Default SSL material locations
+       ca-base /etc/ssl/certs
+       crt-base /etc/ssl/private
+
+       # See: https://ssl-config.mozilla.org/#server=haproxy&server-version=2.0.3&config=intermediate
+        ssl-default-bind-ciphers ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:DHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384
+        ssl-default-bind-ciphersuites TLS_AES_128_GCM_SHA256:TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256
+        ssl-default-bind-options ssl-min-ver TLSv1.2 no-tls-tickets
+
+defaults
+       log     global
+       mode    http
+       option  httplog
+       option  dontlognull
+        timeout connect 5000
+        timeout client  50000
+        timeout server  50000
+       errorfile 400 /etc/haproxy/errors/400.http
+       errorfile 403 /etc/haproxy/errors/403.http
+       errorfile 408 /etc/haproxy/errors/408.http
+       errorfile 500 /etc/haproxy/errors/500.http
+       errorfile 502 /etc/haproxy/errors/502.http
+       errorfile 503 /etc/haproxy/errors/503.http
+       errorfile 504 /etc/haproxy/errors/504.http
diff --git a/haproxy.default b/haproxy.default
new file mode 100644 (file)
index 0000000..e15c193
--- /dev/null
@@ -0,0 +1,10 @@
+# Defaults file for HAProxy
+#
+# This is sourced by both the initscript and the systemd unit file, so do not
+# treat it as a shell script fragment.
+
+# Change the config file location if needed
+#CONFIG="/etc/haproxy/haproxy.cfg"
+
+# Add extra flags here, see haproxy(1) for a few options
+#EXTRAOPTS="-de -m 16"
diff --git a/haproxy.dirs b/haproxy.dirs
new file mode 100644 (file)
index 0000000..b2e3c52
--- /dev/null
@@ -0,0 +1,4 @@
+etc/haproxy
+etc/haproxy/errors
+var/lib/haproxy
+var/lib/haproxy/dev
diff --git a/haproxy.docs b/haproxy.docs
new file mode 100644 (file)
index 0000000..b352c73
--- /dev/null
@@ -0,0 +1,9 @@
+doc/architecture.txt
+doc/configuration.txt
+doc/intro.txt
+doc/lua.txt
+doc/management.txt
+doc/network-namespaces.txt
+doc/SPOE.txt
+contrib
+README
diff --git a/haproxy.examples b/haproxy.examples
new file mode 100644 (file)
index 0000000..08088cb
--- /dev/null
@@ -0,0 +1 @@
+examples/*.cfg
diff --git a/haproxy.init b/haproxy.init
new file mode 100644 (file)
index 0000000..6f4096b
--- /dev/null
@@ -0,0 +1,197 @@
+#!/bin/sh
+### BEGIN INIT INFO
+# Provides:          haproxy
+# Required-Start:    $local_fs $network $remote_fs $syslog $named
+# Required-Stop:     $local_fs $remote_fs $syslog $named
+# Default-Start:     2 3 4 5
+# Default-Stop:      0 1 6
+# Short-Description: fast and reliable load balancing reverse proxy
+# Description:       This file should be used to start and stop haproxy.
+### END INIT INFO
+
+# Author: Arnaud Cornet <acornet@debian.org>
+
+PATH=/sbin:/usr/sbin:/bin:/usr/bin
+BASENAME=haproxy
+PIDFILE=/var/run/${BASENAME}.pid
+CONFIG=/etc/${BASENAME}/${BASENAME}.cfg
+HAPROXY=/usr/sbin/haproxy
+RUNDIR=/run/${BASENAME}
+EXTRAOPTS=
+
+test -x $HAPROXY || exit 0
+
+if [ -e /etc/default/${BASENAME} ]; then
+       . /etc/default/${BASENAME}
+fi
+
+test -f "$CONFIG" || exit 0
+
+[ -f /etc/default/rcS ] && . /etc/default/rcS
+. /lib/lsb/init-functions
+
+
+check_haproxy_config()
+{
+       $HAPROXY -c -f "$CONFIG" $EXTRAOPTS >/dev/null
+       if [ $? -eq 1 ]; then
+               log_end_msg 1
+               exit 1
+       fi
+}
+
+haproxy_start()
+{
+       [ -d "$RUNDIR" ] || mkdir "$RUNDIR"
+       chown haproxy:haproxy "$RUNDIR"
+       chmod 2775 "$RUNDIR"
+
+       check_haproxy_config
+
+       start-stop-daemon --quiet --oknodo --start --pidfile "$PIDFILE" \
+               --exec $HAPROXY -- -f "$CONFIG" -D -p "$PIDFILE" \
+               $EXTRAOPTS || return 2
+       return 0
+}
+
+haproxy_stop()
+{
+       if [ ! -f $PIDFILE ] ; then
+               # This is a success according to LSB
+               return 0
+       fi
+
+       ret=0
+       tmppid="$(mktemp)"
+
+       # HAProxy's pidfile may contain multiple PIDs, if nbproc > 1, so loop
+       # over each PID. Note that start-stop-daemon has a --pid option, but it
+       # was introduced in dpkg 1.17.6, post wheezy, so we use a temporary
+       # pidfile instead to ease backports.
+       for pid in $(cat $PIDFILE); do
+               echo "$pid" > "$tmppid"
+               start-stop-daemon --quiet --oknodo --stop \
+                       --retry 5 --pidfile "$tmppid" --exec $HAPROXY || ret=$?
+       done
+
+       rm -f "$tmppid"
+       [ $ret -eq 0 ] && rm -f $PIDFILE
+
+       return $ret
+}
+
+haproxy_reload()
+{
+       check_haproxy_config
+
+       $HAPROXY -f "$CONFIG" -p $PIDFILE -sf $(cat $PIDFILE) -D $EXTRAOPTS \
+               || return 2
+       return 0
+}
+
+haproxy_status()
+{
+       if [ ! -f $PIDFILE ] ; then
+               # program not running
+               return 3
+       fi
+
+       for pid in $(cat $PIDFILE) ; do
+               if ! ps --no-headers p "$pid" | grep haproxy > /dev/null ; then
+			# pid from pidfile is not a running haproxy: dead, stale pidfile
+                       return 1
+               fi
+       done
+
+       return 0
+}
+
+
+case "$1" in
+start)
+       log_daemon_msg "Starting haproxy" "${BASENAME}"
+       haproxy_start
+       ret=$?
+       case "$ret" in
+       0)
+               log_end_msg 0
+               ;;
+       1)
+               log_end_msg 1
+               echo "pid file '$PIDFILE' found, ${BASENAME} not started."
+               ;;
+       2)
+               log_end_msg 1
+               ;;
+       esac
+       exit $ret
+       ;;
+stop)
+       log_daemon_msg "Stopping haproxy" "${BASENAME}"
+       haproxy_stop
+       ret=$?
+       case "$ret" in
+       0|1)
+               log_end_msg 0
+               ;;
+       2)
+               log_end_msg 1
+               ;;
+       esac
+       exit $ret
+       ;;
+reload|force-reload)
+       log_daemon_msg "Reloading haproxy" "${BASENAME}"
+       haproxy_reload
+       ret=$?
+       case "$ret" in
+       0|1)
+               log_end_msg 0
+               ;;
+       2)
+               log_end_msg 1
+               ;;
+       esac
+       exit $ret
+       ;;
+restart)
+       log_daemon_msg "Restarting haproxy" "${BASENAME}"
+       haproxy_stop
+       haproxy_start
+       ret=$?
+       case "$ret" in
+       0)
+               log_end_msg 0
+               ;;
+       1)
+               log_end_msg 1
+               ;;
+       2)
+               log_end_msg 1
+               ;;
+       esac
+       exit $ret
+       ;;
+status)
+       haproxy_status
+       ret=$?
+       case "$ret" in
+       0)
+               echo "${BASENAME} is running."
+               ;;
+       1)
+               echo "${BASENAME} dead, but $PIDFILE exists."
+               ;;
+       *)
+               echo "${BASENAME} not running."
+               ;;
+       esac
+       exit $ret
+       ;;
+*)
+       echo "Usage: /etc/init.d/${BASENAME} {start|stop|reload|restart|status}"
+       exit 2
+       ;;
+esac
+
+:
diff --git a/haproxy.install b/haproxy.install
new file mode 100644 (file)
index 0000000..66a378c
--- /dev/null
@@ -0,0 +1,4 @@
+debian/haproxy.cfg etc/haproxy
+examples/errorfiles/*.http etc/haproxy/errors
+contrib/systemd/haproxy.service lib/systemd/system
+contrib/halog/halog usr/bin
diff --git a/haproxy.maintscript b/haproxy.maintscript
new file mode 100644 (file)
index 0000000..7a45edb
--- /dev/null
@@ -0,0 +1 @@
+mv_conffile /etc/rsyslog.d/haproxy.conf /etc/rsyslog.d/49-haproxy.conf 1.5.3-2~
diff --git a/haproxy.manpages b/haproxy.manpages
new file mode 100644 (file)
index 0000000..cee4732
--- /dev/null
@@ -0,0 +1,3 @@
+doc/haproxy.1
+doc/lua-api/_build/man/haproxy-lua.1
+debian/halog.1
diff --git a/haproxy.postinst b/haproxy.postinst
new file mode 100644 (file)
index 0000000..08feb12
--- /dev/null
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+set -e
+
+adduser --system --disabled-password --disabled-login --home /var/lib/haproxy \
+        --no-create-home --quiet --force-badname --group haproxy
+
+#DEBHELPER#
+
+if [ -n "$2" ]; then
+       if dpkg --compare-versions "$2" lt "1.8.0-1~" && [ -d /run/systemd/system ]; then
+               # Do a full restart when upgrading to 1.8 series on systemd, as
+               # the systemd wrapper is no longer there.
+               invoke-rc.d haproxy restart || true
+       elif dpkg --compare-versions "$2" gt "1.5~dev24-2~"; then
+               # Reload already running instances. Since 1.5~dev24-2 we do not stop
+               # haproxy in prerm during upgrades.
+               invoke-rc.d haproxy reload || true
+       fi
+fi
+
+exit 0
diff --git a/haproxy.postrm b/haproxy.postrm
new file mode 100644 (file)
index 0000000..5e41016
--- /dev/null
@@ -0,0 +1,16 @@
+#!/bin/sh
+
+set -e
+
+#DEBHELPER#
+
+case "$1" in
+    purge)
+        deluser --system haproxy || true
+        delgroup --system haproxy || true
+    ;;
+    *)
+    ;;
+esac
+
+exit 0
diff --git a/haproxy.tmpfile b/haproxy.tmpfile
new file mode 100644 (file)
index 0000000..9978887
--- /dev/null
@@ -0,0 +1 @@
+d /run/haproxy 2775 haproxy haproxy -
diff --git a/haproxy.vim b/haproxy.vim
new file mode 100644 (file)
index 0000000..d58d0a5
--- /dev/null
@@ -0,0 +1,2 @@
+" detect HAProxy configuration
+au BufRead,BufNewFile haproxy*.cfg              set filetype=haproxy
diff --git a/logrotate.conf b/logrotate.conf
new file mode 100644 (file)
index 0000000..ad2031f
--- /dev/null
@@ -0,0 +1,11 @@
+/var/log/haproxy.log {
+    daily
+    rotate 52
+    missingok
+    notifempty
+    compress
+    delaycompress
+    postrotate
+        /usr/lib/rsyslog/rsyslog-rotate
+    endscript
+}
diff --git a/patches/0001-BUG-CRITICAL-hpack-never-index-a-header-into-the-hea.patch b/patches/0001-BUG-CRITICAL-hpack-never-index-a-header-into-the-hea.patch
new file mode 100644 (file)
index 0000000..fd33180
--- /dev/null
@@ -0,0 +1,51 @@
+From 4e372dc350be5c72b88546bf03392a5793cea179 Mon Sep 17 00:00:00 2001
+From: Willy Tarreau <w@1wt.eu>
+Date: Sun, 29 Mar 2020 08:53:31 +0200
+Subject: BUG/CRITICAL: hpack: never index a header into the headroom after
+ wrapping
+
+The HPACK header table is implemented as a wrapping list inside a contiguous
+area. Headers names and values are stored from right to left while indexes
+are stored from left to right. When there's no more room to store a new one,
+we wrap to the right again, or possibly defragment it if needed. The condition
+to use the right part (called tailroom) or the left part (called headroom)
+depends on the location of the last inserted header. After wrapping happens,
+the code forces to stick to tailroom by pretending there's no more headroom,
+so that the size fit test always fails. The problem is that nothing prevents
+from storing a header with an empty name and empty value, resulting in a
+total size of zero bytes, which satisfies the condition to use the headroom.
+Doing this in a wrapped buffer results in changing the "front" header index
+and causing miscalculations on the available size and the addresses of the
+next headers. This may even allow to overwrite some parts of the index,
+opening the possibility to perform arbitrary writes into a 32-bit relative
+address space.
+
+This patch fixes the issue by making sure the headroom is considered only
+when the buffer does not wrap, instead of relying on the zero size. This
+must be backported to all versions supporting H2, which is as far as 1.8.
+
+Many thanks to Felix Wilhelm of Google Project Zero for responsibly
+reporting this problem with a reproducer and a detailed analysis.
+---
+ src/hpack-tbl.c | 4 ++--
+ 1 file changed, 2 insertions(+), 2 deletions(-)
+
+diff --git a/src/hpack-tbl.c b/src/hpack-tbl.c
+index 70d7f35834..727ff7a17b 100644
+--- a/src/hpack-tbl.c
++++ b/src/hpack-tbl.c
+@@ -346,9 +346,9 @@ int hpack_dht_insert(struct hpack_dht *dht, struct ist name, struct ist value)
+        * room left in the tail to suit the protocol, but tests show that in
+        * practice it almost never happens in other situations so the extra
+        * test is useless and we simply fill the headroom as long as it's
+-       * available.
++       * available and we don't wrap.
+        */
+-      if (headroom >= name.len + value.len) {
++      if (prev == dht->front && headroom >= name.len + value.len) {
+               /* install upfront and update ->front */
+               dht->dte[head].addr = dht->dte[dht->front].addr - (name.len + value.len);
+               dht->front = head;
+-- 
+2.20.1
+
diff --git a/patches/0002-Use-dpkg-buildflags-to-build-halog.patch b/patches/0002-Use-dpkg-buildflags-to-build-halog.patch
new file mode 100644 (file)
index 0000000..a039316
--- /dev/null
@@ -0,0 +1,42 @@
+From: Apollon Oikonomopoulos <apoikos@gmail.com>
+Date: Tue, 2 Jul 2013 15:24:59 +0300
+Subject: Use dpkg-buildflags to build halog
+
+Forwarded: no
+Last-Update: 2013-07-02
+---
+ contrib/halog/Makefile | 16 +++++-----------
+ 1 file changed, 5 insertions(+), 11 deletions(-)
+
+diff --git a/contrib/halog/Makefile b/contrib/halog/Makefile
+index 5e687c0..ab34027 100644
+--- a/contrib/halog/Makefile
++++ b/contrib/halog/Makefile
+@@ -1,22 +1,16 @@
+ EBTREE_DIR = ../../ebtree
+ INCLUDE  = -I../../include -I$(EBTREE_DIR)
+-CC       = gcc
+-
+-# note: it is recommended to also add -fomit-frame-pointer on i386
+-OPTIMIZE = -O3
++CPPFLAGS:=$(shell dpkg-buildflags --get CPPFLAGS)
++CFLAGS:=$(shell dpkg-buildflags --get CFLAGS)
++LDFLAGS:=$(shell dpkg-buildflags --get LDFLAGS)
+-# most recent glibc provide platform-specific optimizations that make
+-# memchr faster than the generic C implementation (eg: SSE and prefetch
+-# on x86_64). Try with an without. In general, on x86_64 it's better to
+-# use memchr using the define below.
+-# DEFINE   = -DUSE_MEMCHR
+-DEFINE   =
++CC       = gcc
+ OBJS     = halog
+ halog: halog.c fgets2.c
+-      $(CC) $(OPTIMIZE) $(DEFINE) -o $@ $(INCLUDE) $(EBTREE_DIR)/ebtree.c $(EBTREE_DIR)/eb32tree.c $(EBTREE_DIR)/eb64tree.c $(EBTREE_DIR)/ebmbtree.c $(EBTREE_DIR)/ebsttree.c $(EBTREE_DIR)/ebistree.c $(EBTREE_DIR)/ebimtree.c $^
++      $(CC) $(CPPFLAGS) $(CFLAGS) $(LDFLAGS) -o $@ $(INCLUDE) $(EBTREE_DIR)/ebtree.c $(EBTREE_DIR)/eb32tree.c $(EBTREE_DIR)/eb64tree.c $(EBTREE_DIR)/ebmbtree.c $(EBTREE_DIR)/ebsttree.c $(EBTREE_DIR)/ebistree.c $(EBTREE_DIR)/ebimtree.c $^
+ clean:
+       rm -f $(OBJS) *.[oas]
diff --git a/patches/debianize-dconv.patch b/patches/debianize-dconv.patch
new file mode 100644 (file)
index 0000000..34710ce
--- /dev/null
@@ -0,0 +1,170 @@
+From: Apollon Oikonomopoulos <apoikos@debian.org>
+Date: Wed, 29 Apr 2015 13:51:49 +0300
+Subject: [PATCH] dconv: debianize
+
+ - Use Debian bootstrap and jquery packages
+ - Add Debian-related resources to the template
+ - Use the package's version instead of HAProxy's git version
+ - Strip the conversion date from the output to ensure reproducible
+   build.
+ - 2020-01-17: make get_haproxy_debian_version() return a string, for py3
+   compatibility
+
+diff --git a/debian/dconv/haproxy-dconv.py b/debian/dconv/haproxy-dconv.py
+index fe2b96dce325..702eefac6a3b 100755
+--- a/debian/dconv/haproxy-dconv.py
++++ b/debian/dconv/haproxy-dconv.py
+@@ -44,12 +44,11 @@ VERSION = ""
+ HAPROXY_GIT_VERSION = False
+ def main():
+-    global VERSION, HAPROXY_GIT_VERSION
++    global HAPROXY_GIT_VERSION
+     usage="Usage: %prog --infile <infile> --outfile <outfile>"
+     optparser = OptionParser(description='Generate HTML Document from HAProxy configuation.txt',
+-                          version=VERSION,
+                           usage=usage)
+     optparser.add_option('--infile', '-i', help='Input file mostly the configuration.txt')
+     optparser.add_option('--outfile','-o', help='Output file')
+@@ -65,11 +64,7 @@ def main():
+     os.chdir(os.path.dirname(__file__))
+-    VERSION = get_git_version()
+-    if not VERSION:
+-        sys.exit(1)
+-
+-    HAPROXY_GIT_VERSION = get_haproxy_git_version(os.path.dirname(option.infile))
++    HAPROXY_GIT_VERSION = get_haproxy_debian_version(os.path.dirname(option.infile))
+     convert(option.infile, option.outfile, option.base)
+@@ -114,6 +109,15 @@ def get_haproxy_git_version(path):
+     version = re.sub(r'-g.*', '', version)
+     return version
++def get_haproxy_debian_version(path):
++    try:
++        version = subprocess.check_output(["dpkg-parsechangelog", "-Sversion"],
++                                          cwd=os.path.join(path, ".."))
++    except subprocess.CalledProcessError:
++        return False
++
++    return version.decode("utf-8").strip()
++
+ def getTitleDetails(string):
+     array = string.split(".")
+@@ -506,7 +510,6 @@ def convert(infile, outfile, base=''):
+             keywords = keywords,
+             keywordsCount = keywordsCount,
+             keyword_conflicts = keyword_conflicts,
+-            version = VERSION,
+             date = datetime.datetime.now().strftime("%Y/%m/%d"),
+         )
+     except TopLevelLookupException:
+@@ -524,7 +527,6 @@ def convert(infile, outfile, base=''):
+             keywords = keywords,
+             keywordsCount = keywordsCount,
+             keyword_conflicts = keyword_conflicts,
+-            version = VERSION,
+             date = datetime.datetime.now().strftime("%Y/%m/%d"),
+             footer = footer
+     )
+diff --git a/debian/dconv/templates/template.html b/debian/dconv/templates/template.html
+index c72b3558c2dd..9aefa16dd82d 100644
+--- a/debian/dconv/templates/template.html
++++ b/debian/dconv/templates/template.html
+@@ -3,8 +3,8 @@
+       <head>
+               <meta charset="utf-8" />
+               <title>${headers['title']} ${headers['version']} - ${headers['subtitle']}</title>
+-              <link href="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet" />
+-              <link href="${base}css/page.css?${version}" rel="stylesheet" />
++              <link href="${base}css/bootstrap.min.css" rel="stylesheet" />
++              <link href="${base}css/page.css" rel="stylesheet" />
+       </head>
+       <body>
+               <nav class="navbar navbar-default navbar-fixed-top" role="navigation">
+@@ -15,7 +15,7 @@
+                                       <span class="icon-bar"></span>
+                                       <span class="icon-bar"></span>
+                               </button>
+-                              <a class="navbar-brand" href="${base}index.html">${headers['title']} <small>${headers['subtitle']}</small></a>
++                              <a class="navbar-brand" href="${base}configuration.html">${headers['title']}</a>
+                       </div>
+                       <!-- /.navbar-header -->
+@@ -24,31 +24,16 @@
+                               <ul class="nav navbar-nav">
+                                       <li><a href="http://www.haproxy.org/">HAProxy home page</a></li>
+                                       <li class="dropdown">
+-                                              <a href="#" class="dropdown-toggle" data-toggle="dropdown">Versions <b class="caret"></b></a>
++                                              <a href="#" class="dropdown-toggle" data-toggle="dropdown">Debian resources <b class="caret"></b></a>
+                                               <ul class="dropdown-menu">
+                                                       ## TODO : provide a structure to dynamically generate per version links
+-                                                      <li class="dropdown-header">HAProxy 1.4</li>
+-                                                      <li><a href="${base}configuration-1.4.html">Configuration Manual <small>(stable)</small></a></li>
+-                                                      <li><a href="${base}snapshot/configuration-1.4.html">Configuration Manual <small>(snapshot)</small></a></li>
+-                                                      <li><a href="http://git.1wt.eu/git/haproxy-1.4.git/">GIT Repository</a></li>
+-                                                      <li><a href="http://www.haproxy.org/git/?p=haproxy-1.4.git">Browse repository</a></li>
+-                                                      <li><a href="http://www.haproxy.org/download/1.4/">Browse directory</a></li>
+-                                                      <li class="divider"></li>
+-                                                      <li class="dropdown-header">HAProxy 1.5</li>
+-                                                      <li><a href="${base}configuration-1.5.html">Configuration Manual <small>(stable)</small></a></li>
+-                                                      <li><a href="${base}snapshot/configuration-1.5.html">Configuration Manual <small>(snapshot)</small></a></li>
+-                                                      <li><a href="http://git.1wt.eu/git/haproxy-1.5.git/">GIT Repository</a></li>
+-                                                      <li><a href="http://www.haproxy.org/git/?p=haproxy-1.5.git">Browse repository</a></li>
+-                                                      <li><a href="http://www.haproxy.org/download/1.5/">Browse directory</a></li>
+-                            <li class="divider"></li>
+-                            <li class="dropdown-header">HAProxy 1.6</li>
+-                            <li><a href="${base}configuration-1.6.html">Configuration Manual <small>(stable)</small></a></li>
+-                            <li><a href="${base}snapshot/configuration-1.6.html">Configuration Manual <small>(snapshot)</small></a></li>
+-                            <li><a href="${base}intro-1.6.html">Starter Guide <small>(stable)</small></a></li>
+-                            <li><a href="${base}snapshot/intro-1.6.html">Starter Guide <small>(snapshot)</small></a></li>
+-                            <li><a href="http://git.1wt.eu/git/haproxy.git/">GIT Repository</a></li>
+-                            <li><a href="http://www.haproxy.org/git/?p=haproxy.git">Browse repository</a></li>
+-                            <li><a href="http://www.haproxy.org/download/1.6/">Browse directory</a></li>
++                                                  <li><a href="https://bugs.debian.org/src:haproxy">Bug Tracking System</a></li>
++                                                  <li><a href="https://packages.debian.org/haproxy">Package page</a></li>
++                                                  <li><a href="http://tracker.debian.org/pkg/haproxy">Package Tracking System</a></li>
++                                                  <li class="divider"></li>
++                                                    <li><a href="${base}intro.html">Starter Guide</a></li>
++                                                    <li><a href="${base}configuration.html">Configuration Manual</a></li>
++                                                  <li><a href="http://anonscm.debian.org/gitweb/?p=pkg-haproxy/haproxy.git">Package Git Repository</a></li>
+                                               </ul>
+                                       </li>
+                               </ul>
+@@ -72,7 +57,7 @@
+                                       The feature is automatically disabled when the search field is focused.
+                               </p>
+                               <p class="text-right">
+-                                      <small>Converted with <a href="https://github.com/cbonte/haproxy-dconv">haproxy-dconv</a> v<b>${version}</b> on <b>${date}</b></small>
++                                      <small>Converted with <a href="https://github.com/cbonte/haproxy-dconv">haproxy-dconv</a></small>
+                               </p>
+                       </div>
+                       <!-- /.sidebar -->
+@@ -83,7 +68,7 @@
+                                               <div class="text-center">
+                                                       <h1>${headers['title']}</h1>
+                                                       <h2>${headers['subtitle']}</h2>
+-                                                      <p><strong>${headers['version']}</strong></p>
++                                                      <p><strong>${headers['version']} (Debian)</strong></p>
+                                                       <p>
+                                                               <a href="http://www.haproxy.org/" title="HAProxy Home Page"><img src="${base}img/logo-med.png" /></a><br>
+                                                               ${headers['author']}<br>
+@@ -114,9 +99,9 @@
+               </div>
+               <!-- /#wrapper -->
+-              <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
+-              <script src="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/js/bootstrap.min.js"></script>
+-              <script src="//cdnjs.cloudflare.com/ajax/libs/typeahead.js/0.11.1/typeahead.bundle.min.js"></script>
++              <script src="${base}js/jquery.min.js"></script>
++              <script src="${base}js/bootstrap.min.js"></script>
++              <script src="${base}js/typeahead.bundle.js"></script>
+               <script>
+                       /* Keyword search */
+                       var searchFocus = false
diff --git a/patches/haproxy.service-add-documentation.patch b/patches/haproxy.service-add-documentation.patch
new file mode 100644 (file)
index 0000000..380b39c
--- /dev/null
@@ -0,0 +1,23 @@
+From: Debian HAProxy Maintainers
+ <pkg-haproxy-maintainers@lists.alioth.debian.org>
+Date: Sun, 25 Mar 2018 11:31:50 +0200
+Subject: Add documentation field to the systemd unit
+
+Forwarded: no
+Last-Update: 2014-01-03
+---
+ contrib/systemd/haproxy.service.in | 2 ++
+ 1 file changed, 2 insertions(+)
+
+diff --git a/contrib/systemd/haproxy.service.in b/contrib/systemd/haproxy.service.in
+index 243acf2..ac88c37 100644
+--- a/contrib/systemd/haproxy.service.in
++++ b/contrib/systemd/haproxy.service.in
+@@ -1,5 +1,7 @@
+ [Unit]
+ Description=HAProxy Load Balancer
++Documentation=man:haproxy(1)
++Documentation=file:/usr/share/doc/haproxy/configuration.txt.gz
+ After=network.target rsyslog.service
+ [Service]
diff --git a/patches/haproxy.service-start-after-syslog.patch b/patches/haproxy.service-start-after-syslog.patch
new file mode 100644 (file)
index 0000000..1e8e1e4
--- /dev/null
@@ -0,0 +1,27 @@
+From: Apollon Oikonomopoulos <apoikos@debian.org>
+Date: Sun, 25 Mar 2018 11:31:50 +0200
+Subject: Start after rsyslog.service
+
+As HAProxy is running chrooted by default, we rely on an additional syslog
+socket created by rsyslog inside the chroot for logging. As this socket cannot
+trigger syslog activation, we explicitly order HAProxy after rsyslog.service.
+Note that we are not using syslog.service here, since the additional socket is
+rsyslog-specific.
+Forwarded: no
+Last-Update: 2017-12-01
+---
+ contrib/systemd/haproxy.service.in | 2 +-
+ 1 file changed, 1 insertion(+), 1 deletion(-)
+
+diff --git a/contrib/systemd/haproxy.service.in b/contrib/systemd/haproxy.service.in
+index 74e66e3..243acf2 100644
+--- a/contrib/systemd/haproxy.service.in
++++ b/contrib/systemd/haproxy.service.in
+@@ -1,6 +1,6 @@
+ [Unit]
+ Description=HAProxy Load Balancer
+-After=network.target
++After=network.target rsyslog.service
+ [Service]
+ EnvironmentFile=-/etc/default/haproxy
diff --git a/patches/series b/patches/series
new file mode 100644 (file)
index 0000000..b57f722
--- /dev/null
@@ -0,0 +1,9 @@
+0002-Use-dpkg-buildflags-to-build-halog.patch
+haproxy.service-start-after-syslog.patch
+haproxy.service-add-documentation.patch
+
+# 20200402 security issue (CVE-2020-11100) about HTTP/2 HPACK header table
+0001-BUG-CRITICAL-hpack-never-index-a-header-into-the-hea.patch
+
+# applied during the build process:
+# debianize-dconv.patch
diff --git a/rsyslog.conf b/rsyslog.conf
new file mode 100644 (file)
index 0000000..36a1261
--- /dev/null
@@ -0,0 +1,9 @@
+# Create an additional socket in haproxy's chroot in order to allow logging via
+# /dev/log to chroot'ed HAProxy processes
+$AddUnixListenSocket /var/lib/haproxy/dev/log
+
+# Send HAProxy messages to a dedicated logfile
+:programname, startswith, "haproxy" {
+  /var/log/haproxy.log
+  stop
+}
diff --git a/rules b/rules
new file mode 100755 (executable)
index 0000000..887d0f9
--- /dev/null
+++ b/rules
@@ -0,0 +1,92 @@
+#!/usr/bin/make -f
+
+include /usr/share/dpkg/pkg-info.mk
+include /usr/share/dpkg/architecture.mk
+include /usr/share/dpkg/buildflags.mk
+
+export DEB_BUILD_MAINT_OPTIONS = hardening=+all
+export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed
+
+MAKEARGS=DESTDIR=debian/haproxy \
+        PREFIX=/usr \
+        IGNOREGIT=true \
+        MANDIR=/usr/share/man \
+        DOCDIR=/usr/share/doc/haproxy \
+        USE_PCRE2=1 \
+        USE_PCRE2_JIT=1 \
+        USE_OPENSSL=1 \
+        USE_ZLIB=1 \
+        USE_LUA=1 \
+        LUA_INC=/usr/include/lua5.3 \
+        EXTRA_OBJS="contrib/prometheus-exporter/service-prometheus.o"
+
+ifeq ($(DEB_HOST_ARCH_OS),linux)
+        MAKEARGS+= TARGET=linux-glibc USE_SYSTEMD=1
+else ifeq ($(DEB_HOST_ARCH_OS),kfreebsd)
+        MAKEARGS+= TARGET=freebsd
+else
+       MAKEARGS+= TARGET=generic
+endif
+
+ifneq ($(filter amd64 i386,$(DEB_HOST_ARCH_CPU)),)
+       MAKEARGS+= USE_REGPARM=1
+endif
+
+ifneq ($(filter armel mips mipsel m68k powerpc powerpcspe sh4,$(DEB_HOST_ARCH)),)
+       MAKEARGS+= ADDLIB="-latomic -Wl,--no-as-needed -lgcc_s -Wl,--as-needed"
+else
+       MAKEARGS+= ADDLIB="-Wl,--no-as-needed -lgcc_s -Wl,--as-needed"
+endif
+
+MAKEARGS += DEBUG_CFLAGS="$(CFLAGS) $(CPPFLAGS)"
+MAKEARGS += LDFLAGS="$(LDFLAGS)"
+MAKEARGS += VERSION="$(DEB_VERSION_UPSTREAM)"
+MAKEARGS += SUBVERS="-$(lastword $(subst -, ,$(DEB_VERSION)))"
+MAKEARGS += VERDATE="$(shell TZ=UTC date -d "@$(SOURCE_DATE_EPOCH)" "+%Y/%m/%d")"
+
+%:
+       dh $@ --with sphinxdoc
+
+override_dh_auto_configure:
+
+override_dh_auto_build-arch:
+       make $(MAKEARGS)
+       make -C contrib/systemd $(MAKEARGS)
+       dh_auto_build -Dcontrib/halog
+       $(MAKE) -C doc/lua-api man
+
+override_dh_auto_build-indep:
+       # Build the HTML documentation, after patching dconv
+       patch -p1 < $(CURDIR)/debian/patches/debianize-dconv.patch
+       for doc in intro configuration management; do \
+               python3 -B $(CURDIR)/debian/dconv/haproxy-dconv.py \
+                       -i $(CURDIR)/doc/$${doc}.txt \
+                       -o $(CURDIR)/doc/$${doc}.html ;\
+       done
+       patch -p1 -R < $(CURDIR)/debian/patches/debianize-dconv.patch
+       $(MAKE) -C doc/lua-api html
+
+override_dh_auto_clean:
+       make -C contrib/systemd clean
+       $(MAKE) -C doc/lua-api clean
+       dh_auto_clean
+       dh_auto_clean -Dcontrib/halog
+
+override_dh_auto_install-arch:
+       make $(MAKEARGS) install
+       install -m 0644 -D debian/rsyslog.conf debian/haproxy/etc/rsyslog.d/49-haproxy.conf
+       install -m 0644 -D debian/logrotate.conf debian/haproxy/etc/logrotate.d/haproxy
+
+override_dh_auto_install-indep:
+
+override_dh_installdocs:
+       dh_installdocs -Xsystemd/ -Xhalog/
+
+override_dh_installexamples:
+       dh_installexamples -X build.cfg
+
+override_dh_installinit:
+       dh_installinit --no-restart-after-upgrade --no-stop-on-upgrade
+
+override_dh_strip:
+       dh_strip --dbgsym-migration="haproxy-dbg"
diff --git a/source/format b/source/format
new file mode 100644 (file)
index 0000000..163aaf8
--- /dev/null
@@ -0,0 +1 @@
+3.0 (quilt)
diff --git a/source/include-binaries b/source/include-binaries
new file mode 100644 (file)
index 0000000..a46fd83
--- /dev/null
@@ -0,0 +1,3 @@
+debian/dconv/css/check.png
+debian/dconv/css/cross.png
+debian/dconv/img/logo-med.png
diff --git a/tests/cli b/tests/cli
new file mode 100644 (file)
index 0000000..941b4af
--- /dev/null
+++ b/tests/cli
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+set -e
+
+echo "show stat" | socat STDIO UNIX-CONNECT:/run/haproxy/admin.sock | grep "^#"
+
+echo "show info" | socat STDIO UNIX-CONNECT:/run/haproxy/admin.sock | grep "^Version:"
diff --git a/tests/control b/tests/control
new file mode 100644 (file)
index 0000000..70649e3
--- /dev/null
@@ -0,0 +1,7 @@
+Tests: cli
+Depends: haproxy, socat
+Restrictions: needs-root
+
+Tests: proxy-localhost
+Depends: haproxy, wget, apache2
+Restrictions: needs-root, allow-stderr, isolation-container
diff --git a/tests/proxy-localhost b/tests/proxy-localhost
new file mode 100644 (file)
index 0000000..b1279e8
--- /dev/null
@@ -0,0 +1,48 @@
+#!/bin/sh
+
+set -eux
+
+cat > /etc/haproxy/haproxy.cfg <<EOF
+global
+        chroot /var/lib/haproxy
+        user haproxy
+        group haproxy
+        daemon
+        maxconn 4096
+
+defaults
+        log global
+        option dontlognull
+        option redispatch
+        retries 3
+        timeout client 50s
+        timeout connect 10s
+        timeout http-request 5s
+        timeout server 50s
+        maxconn 4096
+
+frontend test-front
+    bind *:8080
+    mode http
+    default_backend test-back
+
+backend test-back
+    mode http
+    stick store-request src
+    stick-table type ip size 256k expire 30m
+    server test-1 localhost:80
+EOF
+
+service haproxy restart
+
+# index.html is shipped with apache2
+# Download it via haproxy and compare
+if wget -t1 http://localhost:8080 -O- | cmp /var/www/html/index.html -; then
+    echo "OK: index.html downloaded via haproxy matches the source file."
+else
+    echo "FAIL: downloaded index.html via haproxy is different from the"
+    echo "      file delivered by apache."
+    exit 1
+fi
+
+exit 0
diff --git a/vim-haproxy.install b/vim-haproxy.install
new file mode 100644 (file)
index 0000000..adcbbad
--- /dev/null
@@ -0,0 +1,3 @@
+debian/vim-haproxy.yaml /usr/share/vim/registry
+debian/haproxy.vim /usr/share/vim/addons/ftdetect
+contrib/syntax-highlight/haproxy.vim /usr/share/vim/addons/syntax
diff --git a/vim-haproxy.yaml b/vim-haproxy.yaml
new file mode 100644 (file)
index 0000000..f87e84f
--- /dev/null
@@ -0,0 +1,5 @@
+addon: haproxy
+description: "Syntax highlighting for HAProxy"
+files:
+  - syntax/haproxy.vim
+  - ftdetect/haproxy.vim
diff --git a/watch b/watch
new file mode 100644 (file)
index 0000000..41e8da8
--- /dev/null
+++ b/watch
@@ -0,0 +1,2 @@
+version=3
+opts="uversionmangle=s/-(dev\d+)/~$1/" https://www.haproxy.org/download/2.0/src/ haproxy-(2\.0(?:\.|-dev)\d+)\.(?:tgz|tbz2|tar\.(?:gz|bz2|xz))