From 3f82e9323c760b4a1eccc80a89512964b4f36a45 Mon Sep 17 00:00:00 2001
From: Michael Jerris
Date: Wed, 20 Dec 2006 02:58:55 +0000
Subject: [PATCH] remove generated files and add svn:ignore

git-svn-id: http://svn.freeswitch.org/svn/freeswitch/trunk@3747 d0543943-73ff-0310-b7d9-9358b9ac24b2
---
 libs/curl/Makefile        |  831 -----
 libs/curl/lib/ca-bundle.h |    2 -
 libs/curl/src/hugehelp.c  | 6003 ------------------------------------
 3 files changed, 6836 deletions(-)
 delete mode 100644 libs/curl/Makefile
 delete mode 100644 libs/curl/lib/ca-bundle.h
 delete mode 100644 libs/curl/src/hugehelp.c

diff --git a/libs/curl/Makefile b/libs/curl/Makefile
deleted file mode 100644
index c993e1d204..0000000000
--- a/libs/curl/Makefile
+++ /dev/null
@@ -1,831 +0,0 @@
-# Makefile.in generated by automake 1.9.6 from Makefile.am.
-# Makefile.  Generated from Makefile.in by configure.
-
-# Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002,
-# 2003, 2004, 2005  Free Software Foundation, Inc.
-# This Makefile.in is free software; the Free Software Foundation
-# gives unlimited permission to copy and/or distribute it,
-# with or without modifications, as long as this notice is preserved.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
-# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
-# PARTICULAR PURPOSE.
-
-
-#***************************************************************************
-#                                  _   _ ____  _
-#  Project                     ___| | | | _ \| |
-#                             / __| | | | |_) | |
-#                            | (__| |_| |  _ <| |___
-#                              \___|\___/|_| \_\_____|
-#
-# Copyright (C) 1998 - 2005, Daniel Stenberg, , et al.
-#
-# This software is licensed as described in the file COPYING, which
-# you should have received as part of this distribution. The terms
-# are also available at http://curl.haxx.se/docs/copyright.html.
-#
-# You may opt to use, copy, modify, merge, publish, distribute and/or sell
-# copies of the Software, and permit persons to whom the Software is
-# furnished to do so, under the terms of the COPYING file.
-#
-# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
-# KIND, either express or implied.
-#
-# $Id: Makefile.am,v 1.56 2005-11-16 07:20:58 bagder Exp $
-###########################################################################
-
-srcdir = .
-top_srcdir = .
-
-pkgdatadir = $(datadir)/curl
-pkglibdir = $(libdir)/curl
-pkgincludedir = $(includedir)/curl
-top_builddir = .
-am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
-INSTALL = /usr/bin/install -c
-install_sh_DATA = $(install_sh) -c -m 644
-install_sh_PROGRAM = $(install_sh) -c
-install_sh_SCRIPT = $(install_sh) -c
-INSTALL_HEADER = $(INSTALL_DATA)
-transform = $(program_transform_name)
-NORMAL_INSTALL = :
-PRE_INSTALL = :
-POST_INSTALL = :
-NORMAL_UNINSTALL = :
-PRE_UNINSTALL = :
-POST_UNINSTALL = :
-build_triplet = x86_64-unknown-linux-gnu
-host_triplet = x86_64-unknown-linux-gnu
-DIST_COMMON = README $(am__configure_deps) $(srcdir)/Makefile.am \
-	$(srcdir)/Makefile.in $(srcdir)/curl-config.in \
-	$(srcdir)/libcurl.pc.in $(top_srcdir)/configure COPYING \
-	compile config.guess config.sub depcomp install-sh ltmain.sh \
-	missing mkinstalldirs
-subdir = .
-ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 -am__aclocal_m4_deps = $(top_srcdir)/acinclude.m4 \ - $(top_srcdir)/configure.ac -am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ - $(ACLOCAL_M4) -am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ - configure.lineno configure.status.lineno -mkinstalldirs = $(SHELL) $(top_srcdir)/mkinstalldirs -CONFIG_HEADER = $(top_builddir)/lib/config.h \ - $(top_builddir)/src/config.h -CONFIG_CLEAN_FILES = curl-config libcurl.pc -am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(pkgconfigdir)" -binSCRIPT_INSTALL = $(INSTALL_SCRIPT) -SCRIPTS = $(bin_SCRIPTS) -SOURCES = -DIST_SOURCES = -RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ - html-recursive info-recursive install-data-recursive \ - install-exec-recursive install-info-recursive \ - install-recursive installcheck-recursive installdirs-recursive \ - pdf-recursive ps-recursive uninstall-info-recursive \ - uninstall-recursive -am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; -am__vpath_adj = case $$p in \ - $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ - *) f=$$p;; \ - esac; -am__strip_dir = `echo $$p | sed -e 's|^.*/||'`; -pkgconfigDATA_INSTALL = $(INSTALL_DATA) -DATA = $(pkgconfig_DATA) -ETAGS = etags -CTAGS = ctags -DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) -distdir = $(PACKAGE)-$(VERSION) -top_distdir = $(distdir) -am__remove_distdir = \ - { test ! -d $(distdir) \ - || { find $(distdir) -type d ! -perm -200 -exec chmod u+w {} ';' \ - && rm -fr $(distdir); }; } -DIST_ARCHIVES = $(distdir).tar.gz -GZIP_ENV = --best -distuninstallcheck_listfiles = find . -type f -print -distcleancheck_listfiles = find . -type f -print -ACLOCAL = ${SHELL} "/usr/src/freeswitch.working/libs/curl/missing" --run aclocal-1.9 -AMDEP_FALSE = # -AMDEP_TRUE = -AMTAR = ${SHELL} "/usr/src/freeswitch.working/libs/curl/missing" --run tar -AR = ar -AS = as -AUTOCONF = ${SHELL} "/usr/src/freeswitch.working/libs/curl/missing" --run autoconf -AUTOHEADER = ${SHELL} "/usr/src/freeswitch.working/libs/curl/missing" --run autoheader -AUTOMAKE = ${SHELL} "/usr/src/freeswitch.working/libs/curl/missing" --run automake-1.9 -AWK = gawk -CABUNDLE_FALSE = # -CABUNDLE_TRUE = -CC = gcc -CCDEPMODE = depmode=gcc3 -CFLAGS = -CPP = gcc -E -CPPFLAGS = -I/usr/kerberos/include -CROSSCOMPILING_FALSE = -CROSSCOMPILING_TRUE = # -CURL_CA_BUNDLE = "${prefix}/share/curl/curl-ca-bundle.crt" -CURL_DISABLE_COOKIES = -CURL_DISABLE_CRYPTO_AUTH = -CURL_DISABLE_DICT = -CURL_DISABLE_FILE = -CURL_DISABLE_FTP = -CURL_DISABLE_HTTP = -CURL_DISABLE_LDAP = -CURL_DISABLE_TELNET = -CURL_DISABLE_TFTP = -CURL_DISABLE_VERBOSE_STRINGS = -CURL_EXTERN_SYMBOL = -CURL_HIDDEN_SYMBOLS = -CXX = g++ -CXXCPP = g++ -E -CXXDEPMODE = depmode=gcc3 -CXXFLAGS = -g -O2 -CYGPATH_W = echo -DEFS = -DHAVE_CONFIG_H -DEPDIR = .deps -DLLTOOL = dlltool -ECHO = echo -ECHO_C = -ECHO_N = -n -ECHO_T = -EGREP = /bin/grep -E -EXEEXT = -F77 = -FFLAGS = -GREP = /bin/grep -HAVE_ARES = -HAVE_LIBZ = 1 -HAVE_LIBZ_FALSE = # -HAVE_LIBZ_TRUE = -IDN_ENABLED = -INSTALL_DATA = ${INSTALL} -m 644 -INSTALL_PROGRAM = ${INSTALL} -INSTALL_SCRIPT = ${INSTALL} -INSTALL_STRIP_PROGRAM = ${SHELL} $(install_sh) -c -s -IPV6_ENABLED = 1 -KRB4_ENABLED = -LDFLAGS = -L/usr/kerberos/lib -L/usr/kerberos/lib64 -LIBOBJS = -LIBS = -lssl -lcrypto -ldl -lssl -lcrypto -lgssapi_krb5 -lkrb5 -lcom_err -lk5crypto -lresolv -ldl -lz -lz -LIBTOOL = $(SHELL) $(top_builddir)/libtool -LN_S = ln -s -LTLIBOBJS = -MAINT = # -MAINTAINER_MODE_FALSE 
= -MAINTAINER_MODE_TRUE = # -MAKEINFO = ${SHELL} "/usr/src/freeswitch.working/libs/curl/missing" --run makeinfo -MANOPT = -man -MIMPURE_FALSE = -MIMPURE_TRUE = # -NO_UNDEFINED_FALSE = -NO_UNDEFINED_TRUE = # -NROFF = /usr/bin/gnroff -OBJDUMP = objdump -OBJEXT = o -PACKAGE = curl -PACKAGE_BUGREPORT = a suitable curl mailing list => http://curl.haxx.se/mail/ -PACKAGE_NAME = curl -PACKAGE_STRING = curl - -PACKAGE_TARNAME = curl -PACKAGE_VERSION = - -PATH_SEPARATOR = : -PERL = /usr/local/bin/perl -PKGADD_NAME = cURL - a client that groks URLs -PKGADD_PKG = HAXXcurl -PKGADD_VENDOR = curl.haxx.se -PKGCONFIG = /usr/bin/pkg-config -RANDOM_FILE = /dev/urandom -RANLIB = ranlib -SED = /bin/sed -SET_MAKE = -SHELL = /bin/sh -STRIP = strip -USE_GNUTLS = -USE_MANUAL_FALSE = # -USE_MANUAL_TRUE = -USE_SSLEAY = 1 -USE_WINDOWS_SSPI = -VERSION = 7.16.0 -VERSIONNUM = 071000 -ac_ct_CC = gcc -ac_ct_CXX = g++ -ac_ct_F77 = -am__fastdepCC_FALSE = # -am__fastdepCC_TRUE = -am__fastdepCXX_FALSE = # -am__fastdepCXX_TRUE = -am__include = include -am__leading_dot = . -am__quote = -am__tar = ${AMTAR} chof - "$$tardir" -am__untar = ${AMTAR} xf - -bindir = ${exec_prefix}/bin -build = x86_64-unknown-linux-gnu -build_alias = -build_cpu = x86_64 -build_os = linux-gnu -build_vendor = unknown -datadir = ${datarootdir} -datarootdir = ${prefix}/share -docdir = ${datarootdir}/doc/${PACKAGE_TARNAME} -dvidir = ${docdir} -exec_prefix = ${prefix} -host = x86_64-unknown-linux-gnu -host_alias = -host_cpu = x86_64 -host_os = linux-gnu -host_vendor = unknown -htmldir = ${docdir} -includedir = ${prefix}/include -infodir = ${datarootdir}/info -install_sh = /usr/src/freeswitch.working/libs/curl/install-sh -libdir = ${exec_prefix}/lib -libexecdir = ${exec_prefix}/libexec -localedir = ${datarootdir}/locale -localstatedir = ${prefix}/var -mandir = ${datarootdir}/man -mkdir_p = mkdir -p -- -oldincludedir = /usr/include -pdfdir = ${docdir} -prefix = /usr/local/freeswitch.test -program_transform_name = s,x,x, -psdir = ${docdir} -sbindir = ${exec_prefix}/sbin -sharedstatedir = ${prefix}/com -subdirs = -sysconfdir = ${prefix}/etc -target_alias = -AUTOMAKE_OPTIONS = foreign -EXTRA_DIST = CHANGES COPYING maketgz reconf Makefile.dist curl-config.in \ - curl-style.el sample.emacs RELEASE-NOTES buildconf buildconf.bat libcurl.pc.in - -bin_SCRIPTS = curl-config -SUBDIRS = lib src -DIST_SUBDIRS = $(SUBDIRS) tests include packages docs -pkgconfigdir = $(libdir)/pkgconfig -pkgconfig_DATA = libcurl.pc -all: all-recursive - -.SUFFIXES: -am--refresh: - @: -$(srcdir)/Makefile.in: # $(srcdir)/Makefile.am $(am__configure_deps) - @for dep in $?; do \ - case '$(am__configure_deps)' in \ - *$$dep*) \ - echo ' cd $(srcdir) && $(AUTOMAKE) --foreign '; \ - cd $(srcdir) && $(AUTOMAKE) --foreign \ - && exit 0; \ - exit 1;; \ - esac; \ - done; \ - echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign Makefile'; \ - cd $(top_srcdir) && \ - $(AUTOMAKE) --foreign Makefile -.PRECIOUS: Makefile -Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status - @case '$?' 
in \ - *config.status*) \ - echo ' $(SHELL) ./config.status'; \ - $(SHELL) ./config.status;; \ - *) \ - echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ - cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ - esac; - -$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) - $(SHELL) ./config.status --recheck - -$(top_srcdir)/configure: # $(am__configure_deps) - cd $(srcdir) && $(AUTOCONF) -$(ACLOCAL_M4): # $(am__aclocal_m4_deps) - cd $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) -curl-config: $(top_builddir)/config.status $(srcdir)/curl-config.in - cd $(top_builddir) && $(SHELL) ./config.status $@ -libcurl.pc: $(top_builddir)/config.status $(srcdir)/libcurl.pc.in - cd $(top_builddir) && $(SHELL) ./config.status $@ -install-binSCRIPTS: $(bin_SCRIPTS) - @$(NORMAL_INSTALL) - test -z "$(bindir)" || $(mkdir_p) "$(DESTDIR)$(bindir)" - @list='$(bin_SCRIPTS)'; for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - if test -f $$d$$p; then \ - f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \ - echo " $(binSCRIPT_INSTALL) '$$d$$p' '$(DESTDIR)$(bindir)/$$f'"; \ - $(binSCRIPT_INSTALL) "$$d$$p" "$(DESTDIR)$(bindir)/$$f"; \ - else :; fi; \ - done - -uninstall-binSCRIPTS: - @$(NORMAL_UNINSTALL) - @list='$(bin_SCRIPTS)'; for p in $$list; do \ - f=`echo "$$p" | sed 's|^.*/||;$(transform)'`; \ - echo " rm -f '$(DESTDIR)$(bindir)/$$f'"; \ - rm -f "$(DESTDIR)$(bindir)/$$f"; \ - done - -mostlyclean-libtool: - -rm -f *.lo - -clean-libtool: - -rm -rf .libs _libs - -distclean-libtool: - -rm -f libtool -uninstall-info-am: -install-pkgconfigDATA: $(pkgconfig_DATA) - @$(NORMAL_INSTALL) - test -z "$(pkgconfigdir)" || $(mkdir_p) "$(DESTDIR)$(pkgconfigdir)" - @list='$(pkgconfig_DATA)'; for p in $$list; do \ - if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ - f=$(am__strip_dir) \ - echo " $(pkgconfigDATA_INSTALL) '$$d$$p' '$(DESTDIR)$(pkgconfigdir)/$$f'"; \ - $(pkgconfigDATA_INSTALL) "$$d$$p" "$(DESTDIR)$(pkgconfigdir)/$$f"; \ - done - -uninstall-pkgconfigDATA: - @$(NORMAL_UNINSTALL) - @list='$(pkgconfig_DATA)'; for p in $$list; do \ - f=$(am__strip_dir) \ - echo " rm -f '$(DESTDIR)$(pkgconfigdir)/$$f'"; \ - rm -f "$(DESTDIR)$(pkgconfigdir)/$$f"; \ - done - -# This directory's subdirectories are mostly independent; you can cd -# into them and run `make' without going through this Makefile. -# To change the values of `make' variables: instead of editing Makefiles, -# (1) if the variable is set in `config.status', edit `config.status' -# (which will cause the Makefiles to be regenerated when you run `make'); -# (2) otherwise, pass the desired values on the `make' command line. 
-$(RECURSIVE_TARGETS): - @failcom='exit 1'; \ - for f in x $$MAKEFLAGS; do \ - case $$f in \ - *=* | --[!k]*);; \ - *k*) failcom='fail=yes';; \ - esac; \ - done; \ - dot_seen=no; \ - target=`echo $@ | sed s/-recursive//`; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - dot_seen=yes; \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done; \ - if test "$$dot_seen" = "no"; then \ - $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ - fi; test -z "$$fail" - -mostlyclean-recursive clean-recursive distclean-recursive \ -maintainer-clean-recursive: - @failcom='exit 1'; \ - for f in x $$MAKEFLAGS; do \ - case $$f in \ - *=* | --[!k]*);; \ - *k*) failcom='fail=yes';; \ - esac; \ - done; \ - dot_seen=no; \ - case "$@" in \ - distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ - *) list='$(SUBDIRS)' ;; \ - esac; \ - rev=''; for subdir in $$list; do \ - if test "$$subdir" = "."; then :; else \ - rev="$$subdir $$rev"; \ - fi; \ - done; \ - rev="$$rev ."; \ - target=`echo $@ | sed s/-recursive//`; \ - for subdir in $$rev; do \ - echo "Making $$target in $$subdir"; \ - if test "$$subdir" = "."; then \ - local_target="$$target-am"; \ - else \ - local_target="$$target"; \ - fi; \ - (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ - || eval $$failcom; \ - done && test -z "$$fail" -tags-recursive: - list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ - done -ctags-recursive: - list='$(SUBDIRS)'; for subdir in $$list; do \ - test "$$subdir" = . || (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ - done - -ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - mkid -fID $$unique -tags: TAGS - -TAGS: tags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ - if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ - include_option=--etags-include; \ - empty_fix=.; \ - else \ - include_option=--include; \ - empty_fix=; \ - fi; \ - list='$(SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - test ! 
-f $$subdir/TAGS || \ - tags="$$tags $$include_option=$$here/$$subdir/TAGS"; \ - fi; \ - done; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - if test -z "$(ETAGS_ARGS)$$tags$$unique"; then :; else \ - test -n "$$unique" || unique=$$empty_fix; \ - $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ - $$tags $$unique; \ - fi -ctags: CTAGS -CTAGS: ctags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ - $(TAGS_FILES) $(LISP) - tags=; \ - here=`pwd`; \ - list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ - unique=`for i in $$list; do \ - if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ - done | \ - $(AWK) ' { files[$$0] = 1; } \ - END { for (i in files) print i; }'`; \ - test -z "$(CTAGS_ARGS)$$tags$$unique" \ - || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ - $$tags $$unique - -GTAGS: - here=`$(am__cd) $(top_builddir) && pwd` \ - && cd $(top_srcdir) \ - && gtags -i $(GTAGS_ARGS) $$here - -distclean-tags: - -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags - -distdir: $(DISTFILES) - $(am__remove_distdir) - mkdir $(distdir) - $(mkdir_p) $(distdir)/. $(distdir)/packages/AIX/RPM $(distdir)/packages/EPM $(distdir)/packages/Linux/RPM - @srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; \ - topsrcdirstrip=`echo "$(top_srcdir)" | sed 's|.|.|g'`; \ - list='$(DISTFILES)'; for file in $$list; do \ - case $$file in \ - $(srcdir)/*) file=`echo "$$file" | sed "s|^$$srcdirstrip/||"`;; \ - $(top_srcdir)/*) file=`echo "$$file" | sed "s|^$$topsrcdirstrip/|$(top_builddir)/|"`;; \ - esac; \ - if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ - dir=`echo "$$file" | sed -e 's,/[^/]*$$,,'`; \ - if test "$$dir" != "$$file" && test "$$dir" != "."; then \ - dir="/$$dir"; \ - $(mkdir_p) "$(distdir)$$dir"; \ - else \ - dir=''; \ - fi; \ - if test -d $$d/$$file; then \ - if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ - cp -pR $(srcdir)/$$file $(distdir)$$dir || exit 1; \ - fi; \ - cp -pR $$d/$$file $(distdir)$$dir || exit 1; \ - else \ - test -f $(distdir)/$$file \ - || cp -p $$d/$$file $(distdir)/$$file \ - || exit 1; \ - fi; \ - done - list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ - if test "$$subdir" = .; then :; else \ - test -d "$(distdir)/$$subdir" \ - || $(mkdir_p) "$(distdir)/$$subdir" \ - || exit 1; \ - distdir=`$(am__cd) $(distdir) && pwd`; \ - top_distdir=`$(am__cd) $(top_distdir) && pwd`; \ - (cd $$subdir && \ - $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$$top_distdir" \ - distdir="$$distdir/$$subdir" \ - distdir) \ - || exit 1; \ - fi; \ - done - $(MAKE) $(AM_MAKEFLAGS) \ - top_distdir="$(top_distdir)" distdir="$(distdir)" \ - dist-hook - -find $(distdir) -type d ! -perm -777 -exec chmod a+rwx {} \; -o \ - ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ - ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ - ! -type d ! 
-perm -444 -exec $(SHELL) $(install_sh) -c -m a+r {} {} \; \ - || chmod -R a+r $(distdir) -dist-gzip: distdir - tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__remove_distdir) - -dist-bzip2: distdir - tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 - $(am__remove_distdir) - -dist-tarZ: distdir - tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z - $(am__remove_distdir) - -dist-shar: distdir - shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz - $(am__remove_distdir) - -dist-zip: distdir - -rm -f $(distdir).zip - zip -rq $(distdir).zip $(distdir) - $(am__remove_distdir) - -dist dist-all: distdir - tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz - $(am__remove_distdir) - -# This target untars the dist file and tries a VPATH configuration. Then -# it guarantees that the distribution is self-contained by making another -# tarfile. -distcheck: dist - case '$(DIST_ARCHIVES)' in \ - *.tar.gz*) \ - GZIP=$(GZIP_ENV) gunzip -c $(distdir).tar.gz | $(am__untar) ;;\ - *.tar.bz2*) \ - bunzip2 -c $(distdir).tar.bz2 | $(am__untar) ;;\ - *.tar.Z*) \ - uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ - *.shar.gz*) \ - GZIP=$(GZIP_ENV) gunzip -c $(distdir).shar.gz | unshar ;;\ - *.zip*) \ - unzip $(distdir).zip ;;\ - esac - chmod -R a-w $(distdir); chmod a+w $(distdir) - mkdir $(distdir)/_build - mkdir $(distdir)/_inst - chmod a-w $(distdir) - dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ - && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ - && cd $(distdir)/_build \ - && ../configure --srcdir=.. --prefix="$$dc_install_base" \ - $(DISTCHECK_CONFIGURE_FLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) \ - && $(MAKE) $(AM_MAKEFLAGS) dvi \ - && $(MAKE) $(AM_MAKEFLAGS) check \ - && $(MAKE) $(AM_MAKEFLAGS) install \ - && $(MAKE) $(AM_MAKEFLAGS) installcheck \ - && $(MAKE) $(AM_MAKEFLAGS) uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ - distuninstallcheck \ - && chmod -R a-w "$$dc_install_base" \ - && ({ \ - (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ - && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ - distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ - } || { rm -rf "$$dc_destdir"; exit 1; }) \ - && rm -rf "$$dc_destdir" \ - && $(MAKE) $(AM_MAKEFLAGS) dist \ - && rm -rf $(DIST_ARCHIVES) \ - && $(MAKE) $(AM_MAKEFLAGS) distcleancheck - $(am__remove_distdir) - @(echo "$(distdir) archives ready for distribution: "; \ - list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ - sed -e '1{h;s/./=/g;p;x;}' -e '$${p;x;}' -distuninstallcheck: - @cd $(distuninstallcheck_dir) \ - && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ - || { echo "ERROR: files left after uninstall:" ; \ - if test -n "$(DESTDIR)"; then \ - echo " (check DESTDIR support)"; \ - fi ; \ - $(distuninstallcheck_listfiles) ; \ - exit 1; } >&2 -distcleancheck: distclean - @if test '$(srcdir)' = . 
; then \ - echo "ERROR: distcleancheck can only run from a VPATH build" ; \ - exit 1 ; \ - fi - @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ - || { echo "ERROR: files left in build directory after distclean:" ; \ - $(distcleancheck_listfiles) ; \ - exit 1; } >&2 -check-am: all-am -check: check-recursive -all-am: Makefile $(SCRIPTS) $(DATA) -installdirs: installdirs-recursive -installdirs-am: - for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(pkgconfigdir)"; do \ - test -z "$$dir" || $(mkdir_p) "$$dir"; \ - done -install: install-recursive -install-exec: install-exec-recursive -install-data: install-data-recursive -uninstall: uninstall-recursive - -install-am: all-am - @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am - -installcheck: installcheck-recursive -install-strip: - $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ - install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ - `test -z '$(STRIP)' || \ - echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install -mostlyclean-generic: - -clean-generic: - -distclean-generic: - -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) - -maintainer-clean-generic: - @echo "This command is intended for maintainers to use" - @echo "it deletes files that may require special tools to rebuild." -clean: clean-recursive - -clean-am: clean-generic clean-libtool mostlyclean-am - -distclean: distclean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -f Makefile -distclean-am: clean-am distclean-generic distclean-libtool \ - distclean-tags - -dvi: dvi-recursive - -dvi-am: - -info: info-recursive - -info-am: - -install-data-am: install-pkgconfigDATA - @$(NORMAL_INSTALL) - $(MAKE) $(AM_MAKEFLAGS) install-data-hook - -install-exec-am: install-binSCRIPTS - -install-info: install-info-recursive - -install-man: - -installcheck-am: - -maintainer-clean: maintainer-clean-recursive - -rm -f $(am__CONFIG_DISTCLEAN_FILES) - -rm -rf $(top_srcdir)/autom4te.cache - -rm -f Makefile -maintainer-clean-am: distclean-am maintainer-clean-generic - -mostlyclean: mostlyclean-recursive - -mostlyclean-am: mostlyclean-generic mostlyclean-libtool - -pdf-am: - -ps: ps-recursive - -ps-am: - -uninstall-am: uninstall-binSCRIPTS uninstall-info-am \ - uninstall-pkgconfigDATA - @$(NORMAL_INSTALL) - $(MAKE) $(AM_MAKEFLAGS) uninstall-hook - -uninstall-info: uninstall-info-recursive - -.PHONY: $(RECURSIVE_TARGETS) CTAGS GTAGS all all-am am--refresh check \ - check-am clean clean-generic clean-libtool clean-recursive \ - ctags ctags-recursive dist dist-all dist-bzip2 dist-gzip \ - dist-hook dist-shar dist-tarZ dist-zip distcheck distclean \ - distclean-generic distclean-libtool distclean-recursive \ - distclean-tags distcleancheck distdir distuninstallcheck dvi \ - dvi-am html html-am info info-am install install-am \ - install-binSCRIPTS install-data install-data-am \ - install-data-hook install-exec install-exec-am install-info \ - install-info-am install-man install-pkgconfigDATA \ - install-strip installcheck installcheck-am installdirs \ - installdirs-am maintainer-clean maintainer-clean-generic \ - maintainer-clean-recursive mostlyclean mostlyclean-generic \ - mostlyclean-libtool mostlyclean-recursive pdf pdf-am ps ps-am \ - tags tags-recursive uninstall uninstall-am \ - uninstall-binSCRIPTS uninstall-hook uninstall-info-am \ - uninstall-pkgconfigDATA - - -dist-hook: - rm -rf $(top_builddir)/tests/log - find $(distdir) -name "*.dist" -exec rm {} \; - (distit=`find $(srcdir) -name "*.dist"`; \ - for file in $$distit; do \ - 
strip=`echo $$file | sed -e s/^$(srcdir)// -e s/\.dist//`; \ - cp $$file $(distdir)$$strip; \ - done) - -html: - cd docs; make html - -pdf: - cd docs; make pdf - -check: test - -#test-full: test -#test-torture: test - -#test: -# @echo "NOTICE: we can't run the tests when cross-compiling!" - -test: - @(cd tests; $(MAKE) all quiet-test) - -test-full: - @(cd tests; $(MAKE) all full-test) - -test-torture: - @(cd tests; $(MAKE) all torture-test) - -# -# Build source and binary rpms. For rpm-3.0 and above, the ~/.rpmmacros -# must contain the following line: -# %_topdir /home/loic/local/rpm -# and that /home/loic/local/rpm contains the directory SOURCES, BUILD etc. -# -# cd /home/loic/local/rpm ; mkdir -p SOURCES BUILD RPMS/i386 SPECS SRPMS -# -# If additional configure flags are needed to build the package, add the -# following in ~/.rpmmacros -# %configure CFLAGS="%{optflags}" ./configure %{_target_platform} --prefix=%{_prefix} ${AM_CONFIGFLAGS} -# and run make rpm in the following way: -# AM_CONFIGFLAGS='--with-uri=/home/users/loic/local/RedHat-6.2' make rpm -# - -rpms: - $(MAKE) RPMDIST=curl rpm - $(MAKE) RPMDIST=curl-ssl rpm - -rpm: - RPM_TOPDIR=`rpm --showrc | $(PERL) -n -e 'print if(s/.*_topdir\s+(.*)/$$1/)'` ; \ - cp $(srcdir)/packages/Linux/RPM/$(RPMDIST).spec $$RPM_TOPDIR/SPECS ; \ - cp $(PACKAGE)-$(VERSION).tar.gz $$RPM_TOPDIR/SOURCES ; \ - rpm -ba --clean --rmsource $$RPM_TOPDIR/SPECS/$(RPMDIST).spec ; \ - mv $$RPM_TOPDIR/RPMS/i386/$(RPMDIST)-*.rpm . ; \ - mv $$RPM_TOPDIR/SRPMS/$(RPMDIST)-*.src.rpm . - -# -# Build a Solaris pkkgadd format file -# run 'make pkgadd' once you've done './configure' and 'make' to make a Solaris pkgadd format -# file (which ends up back in this directory). -# The pkgadd file is in 'pkgtrans' format, so to install on Solaris, do -# pkgadd -d ./HAXXcurl-* -# - -# gak - libtool requires an absoulte directory, hence the pwd below... -pkgadd: - umask 022 ; \ - make install DESTDIR=`/bin/pwd`/packages/Solaris/root ; \ - cat COPYING > $(srcdir)/packages/Solaris/copyright ; \ - cd $(srcdir)/packages/Solaris && $(MAKE) package - -# -# Build a cygwin binary tarball installation file -# resulting .tar.bz2 file will end up at packages/Win32/cygwin -cygwinbin: - $(MAKE) -C packages/Win32/cygwin cygwinbin - -# We extend the standard install with a custom hook: -install-data-hook: - cd include && $(MAKE) install - cd docs && $(MAKE) install - -# We extend the standard uninstall with a custom hook: -uninstall-hook: - cd include && $(MAKE) uninstall - cd docs && $(MAKE) uninstall -# Tell versions [3.59,3.63) of GNU make to not export all variables. -# Otherwise a system limit (for SysV at least) may be exceeded. -.NOEXPORT: diff --git a/libs/curl/lib/ca-bundle.h b/libs/curl/lib/ca-bundle.h deleted file mode 100644 index f792fbde8b..0000000000 --- a/libs/curl/lib/ca-bundle.h +++ /dev/null @@ -1,2 +0,0 @@ -/* This file is generated automatically */ -#define CURL_CA_BUNDLE "/usr/local/freeswitch.test/share/curl/curl-ca-bundle.crt" diff --git a/libs/curl/src/hugehelp.c b/libs/curl/src/hugehelp.c deleted file mode 100644 index 18d5f5e80e..0000000000 --- a/libs/curl/src/hugehelp.c +++ /dev/null @@ -1,6003 +0,0 @@ -#include "setup.h" -#ifndef HAVE_LIBZ -/* - * NEVER EVER edit this manually, fix the mkhelp.pl script instead! 
- * Generation time: Wed Oct 25 22:40:23 2006 - */ -#include "setup.h" -#ifdef USE_MANUAL -#include "hugehelp.h" -#include -void hugehelp(void) -{ - fputs( -" _ _ ____ _ \n" -" Project ___| | | | _ \\| | \n" -" / __| | | | |_) | | \n" -" | (__| |_| | _ <| |___ \n" -" \\___|\\___/|_| \\_\\_____|\n" -"\n" -"NAME\n" -" curl - transfer a URL\n" -"\n" -"SYNOPSIS\n" -" curl [options] [URL...]\n" -"\n" -"DESCRIPTION\n" -" curl is a tool to transfer data from or to a server, using one of the\n" -, stdout); - fputs( -" supported protocols (HTTP, HTTPS, FTP, FTPS, TFTP, DICT, TELNET, LDAP\n" -" or FILE). The command is designed to work without user interaction.\n" -"\n" -" curl offers a busload of useful tricks like proxy support, user authen-\n" -" tication, ftp upload, HTTP post, SSL connections, cookies, file trans-\n" -" fer resume and more. As you will see below, the amount of features will\n" -" make your head spin!\n" -"\n" -, stdout); - fputs( -" curl is powered by libcurl for all transfer-related features. See\n" -" libcurl(3) for details.\n" -"\n" -"URL\n" -" The URL syntax is protocol dependent. You'll find a detailed descrip-\n" -" tion in RFC 3986.\n" -"\n" -" You can specify multiple URLs or parts of URLs by writing part sets\n" -" within braces as in:\n" -"\n" -" http://site.{one,two,three}.com\n" -"\n" -" or you can get sequences of alphanumeric series by using [] as in:\n" -"\n" -" ftp://ftp.numericals.com/file[1-100].txt\n" -, stdout); - fputs( -" ftp://ftp.numericals.com/file[001-100].txt (with leading zeros)\n" -" ftp://ftp.letters.com/file[a-z].txt\n" -"\n" -" No nesting of the sequences is supported at the moment, but you can use\n" -" several ones next to each other:\n" -"\n" -" http://any.org/archive[1996-1999]/vol[1-4]/part{a,b,c}.html\n" -"\n" -" You can specify any amount of URLs on the command line. They will be\n" -" fetched in a sequential manner in the specified order.\n" -"\n" -, stdout); - fputs( -" Since curl 7.15.1 you can also specify step counter for the ranges, so\n" -" that you can get every Nth number or letter:\n" -" http://www.numericals.com/file[1-100:10].txt\n" -" http://www.letters.com/file[a-z:2].txt\n" -"\n" -" If you specify URL without protocol:// prefix, curl will attempt to\n" -" guess what protocol you might want. It will then default to HTTP but\n" -" try other protocols based on often-used host name prefixes. For exam-\n" -, stdout); - fputs( -" ple, for host names starting with \"ftp.\" curl will assume you want to\n" -" speak FTP.\n" -"\n" -" Curl will attempt to re-use connections for multiple file transfers, so\n" -" that getting many files from the same server will not do multiple con-\n" -" nects / handshakes. This improves speed. 
Of course this is only done on\n" -" files specified on a single command line and cannot be used between\n" -" separate curl invokes.\n" -"\n" -"PROGRESS METER\n" -, stdout); - fputs( -" curl normally displays a progress meter during operations, indicating\n" -" amount of transfered data, transfer speeds and estimated time left etc.\n" -" However, since curl displays data to the terminal by default, if you\n" -" invoke curl to do an operation and it is about to write data to the\n" -" terminal, it disables the progress meter as otherwise it would mess up\n" -" the output mixing progress meter and response data.\n" -"\n" -, stdout); - fputs( -" If you want a progress meter for HTTP POST or PUT requests, you need to\n" -" redirect the response output to a file, using shell redirect (>), -o\n" -" [file] or similar.\n" -"\n" -" It is not the same case for FTP upload as that operation is not spit-\n" -" ting out any response data to the terminal.\n" -"\n" -" If you prefer a progress \"bar\" instead of the regular meter, -# is your\n" -" friend.\n" -"OPTIONS\n" -" -a/--append\n" -, stdout); - fputs( -" (FTP) When used in an FTP upload, this will tell curl to append\n" -" to the target file instead of overwriting it. If the file\n" -" doesn't exist, it will be created.\n" -"\n" -" If this option is used twice, the second one will disable append\n" -" mode again.\n" -"\n" -" -A/--user-agent \n" -" (HTTP) Specify the User-Agent string to send to the HTTP server.\n" -, stdout); - fputs( -" Some badly done CGIs fail if its not set to \"Mozilla/4.0\". To\n" -" encode blanks in the string, surround the string with single\n" -" quote marks. This can also be set with the -H/--header option\n" -" of course.\n" -"\n" -" If this option is set more than once, the last one will be the\n" -" one that's used.\n" -"\n" -" --anyauth\n" -" (HTTP) Tells curl to figure out authentication method by itself,\n" -, stdout); - fputs( -" and use the most secure one the remote site claims it supports.\n" -" This is done by first doing a request and checking the response-\n" -" headers, thus inducing an extra network round-trip. This is used\n" -" instead of setting a specific authentication method, which you\n" -" can do with --basic, --digest, --ntlm, and --negotiate.\n" -"\n" -" Note that using --anyauth is not recommended if you do uploads\n" -, stdout); - fputs( -" from stdin, since it may require data to be sent twice and then\n" -" the client must be able to rewind. If the need should arise when\n" -" uploading from stdin, the upload operation will fail.\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" -b/--cookie \n" -" (HTTP) Pass the data to the HTTP server as a cookie. It is sup-\n" -, stdout); - fputs( -" posedly the data previously received from the server in a \"Set-\n" -" Cookie:\" line. The data should be in the format \"NAME1=VALUE1;\n" -" NAME2=VALUE2\".\n" -"\n" -" If no '=' letter is used in the line, it is treated as a file-\n" -" name to use to read previously stored cookie lines from, which\n" -" should be used in this session if they match. Using this method\n" -, stdout); - fputs( -" also activates the \"cookie parser\" which will make curl record\n" -" incoming cookies too, which may be handy if you're using this in\n" -" combination with the -L/--location option. 
The file format of\n" -" the file to read cookies from should be plain HTTP headers or\n" -" the Netscape/Mozilla cookie file format.\n" -"\n" -" NOTE that the file specified with -b/--cookie is only used as\n" -, stdout); - fputs( -" input. No cookies will be stored in the file. To store cookies,\n" -" use the -c/--cookie-jar option or you could even save the HTTP\n" -" headers to a file using -D/--dump-header!\n" -"\n" -" If this option is set more than once, the last one will be the\n" -" one that's used.\n" -"\n" -" -B/--use-ascii\n" -" Enable ASCII transfer when using FTP or LDAP. For FTP, this can\n" -, stdout); - fputs( -" also be enforced by using an URL that ends with \";type=A\". This\n" -" option causes data sent to stdout to be in text mode for win32\n" -" systems.\n" -"\n" -" If this option is used twice, the second one will disable ASCII\n" -" usage.\n" -"\n" -" --basic\n" -" (HTTP) Tells curl to use HTTP Basic authentication. This is the\n" -" default and this option is usually pointless, unless you use it\n" -, stdout); - fputs( -" to override a previously set option that sets a different\n" -" authentication method (such as --ntlm, --digest and --negoti-\n" -" ate).\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" --ciphers \n" -" (SSL) Specifies which ciphers to use in the connection. The list\n" -" of ciphers must be using valid ciphers. Read up on SSL cipher\n" -, stdout); - fputs( -" list details on this URL:\n" -" http://www.openssl.org/docs/apps/ciphers.html\n" -"\n" -" If this option is used several times, the last one will override\n" -" the others.\n" -"\n" -" --compressed\n" -" (HTTP) Request a compressed response using one of the algorithms\n" -" libcurl supports, and return the uncompressed document. If this\n" -, stdout); - fputs( -" option is used and the server sends an unsupported encoding,\n" -" Curl will report an error.\n" -"\n" -" If this option is used several times, each occurrence will tog-\n" -" gle it on/off.\n" -"\n" -" --connect-timeout \n" -" Maximum time in seconds that you allow the connection to the\n" -" server to take. This only limits the connection phase, once\n" -, stdout); - fputs( -" curl has connected this option is of no more use. See also the\n" -" -m/--max-time option.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -c/--cookie-jar \n" -" Specify to which file you want curl to write all cookies after a\n" -" completed operation. Curl writes all cookies previously read\n" -" from a specified file as well as all cookies received from\n" -, stdout); - fputs( -" remote server(s). If no cookies are known, no file will be writ-\n" -" ten. The file will be written using the Netscape cookie file\n" -" format. If you set the file name to a single dash, \"-\", the\n" -" cookies will be written to stdout.\n" -"\n" -" NOTE If the cookie jar can't be created or written to, the whole\n" -" curl operation won't fail or even report an error clearly. 
Using\n" -, stdout); - fputs( -" -v will get a warning displayed, but that is the only visible\n" -" feedback you get about this possibly lethal situation.\n" -"\n" -" If this option is used several times, the last specified file\n" -" name will be used.\n" -"\n" -" -C/--continue-at \n" -" Continue/Resume a previous file transfer at the given offset.\n" -" The given offset is the exact number of bytes that will be\n" -, stdout); - fputs( -" skipped counted from the beginning of the source file before it\n" -" is transferred to the destination. If used with uploads, the\n" -" ftp server command SIZE will not be used by curl.\n" -"\n" -" Use \"-C -\" to tell curl to automatically find out where/how to\n" -" resume the transfer. It then uses the given output/input files\n" -" to figure that out.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -, stdout); - fputs( -" --create-dirs\n" -" When used in conjunction with the -o option, curl will create\n" -" the necessary local directory hierarchy as needed. This option\n" -" creates the dirs mentioned with the -o option, nothing else. If\n" -" the -o file name uses no dir or if the dirs it mentions already\n" -" exist, no dir will be created.\n" -"\n" -" To create remote directories when using FTP, try --ftp-create-\n" -" dirs.\n" -"\n" -, stdout); - fputs( -" --crlf (FTP) Convert LF to CRLF in upload. Useful for MVS (OS/390).\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" -d/--data \n" -" (HTTP) Sends the specified data in a POST request to the HTTP\n" -" server, in a way that can emulate as if a user has filled in a\n" -" HTML form and pressed the submit button. Note that the data is\n" -, stdout); - fputs( -" sent exactly as specified with no extra processing (with all\n" -" newlines cut off). The data is expected to be \"url-encoded\".\n" -" This will cause curl to pass the data to the server using the\n" -" content-type application/x-www-form-urlencoded. Compare to\n" -" -F/--form. If this option is used more than once on the same\n" -" command line, the data pieces specified will be merged together\n" -, stdout); - fputs( -" with a separating &-letter. Thus, using '-d name=daniel -d\n" -" skill=lousy' would generate a post chunk that looks like\n" -" 'name=daniel&skill=lousy'.\n" -"\n" -" If you start the data with the letter @, the rest should be a\n" -" file name to read the data from, or - if you want curl to read\n" -" the data from stdin. The contents of the file must already be\n" -, stdout); - fputs( -" url-encoded. Multiple files can also be specified. Posting data\n" -" from a file named 'foobar' would thus be done with --data @foo-\n" -" bar\".\n" -"\n" -" To post data purely binary, you should instead use the --data-\n" -" binary option.\n" -"\n" -" -d/--data is the same as --data-ascii.\n" -"\n" -" If this option is used several times, the ones following the\n" -" first will append data.\n" -"\n" -" --data-ascii \n" -, stdout); - fputs( -" (HTTP) This is an alias for the -d/--data option.\n" -"\n" -" If this option is used several times, the ones following the\n" -" first will append data.\n" -"\n" -" --data-binary \n" -" (HTTP) This posts data in a similar manner as --data-ascii does,\n" -" although when using this option the entire context of the posted\n" -" data is kept as-is. 
If you want to post a binary file without\n" -, stdout); - fputs( -" the strip-newlines feature of the --data-ascii option, this is\n" -" for you.\n" -"\n" -" If this option is used several times, the ones following the\n" -" first will append data.\n" -"\n" -" --digest\n" -" (HTTP) Enables HTTP Digest authentication. This is a authentica-\n" -" tion that prevents the password from being sent over the wire in\n" -" clear text. Use this in combination with the normal -u/--user\n" -, stdout); - fputs( -" option to set user name and password. See also --ntlm, --negoti-\n" -" ate and --anyauth for related options.\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" --disable-eprt\n" -" (FTP) Tell curl to disable the use of the EPRT and LPRT commands\n" -" when doing active FTP transfers. Curl will normally always first\n" -, stdout); - fputs( -" attempt to use EPRT, then LPRT before using PORT, but with this\n" -" option, it will use PORT right away. EPRT and LPRT are exten-\n" -" sions to the original FTP protocol, may not work on all servers\n" -" but enable more functionality in a better way than the tradi-\n" -" tional PORT command.\n" -"\n" -" If this option is used several times, each occurrence will tog-\n" -" gle this on/off.\n" -"\n" -" --disable-epsv\n" -, stdout); - fputs( -" (FTP) Tell curl to disable the use of the EPSV command when\n" -" doing passive FTP transfers. Curl will normally always first\n" -" attempt to use EPSV before PASV, but with this option, it will\n" -" not try using EPSV.\n" -"\n" -" If this option is used several times, each occurrence will tog-\n" -" gle this on/off.\n" -"\n" -" -D/--dump-header \n" -" Write the protocol headers to the specified file.\n" -"\n" -, stdout); - fputs( -" This option is handy to use when you want to store the headers\n" -" that a HTTP site sends to you. Cookies from the headers could\n" -" then be read in a second curl invoke by using the -b/--cookie\n" -" option! The -c/--cookie-jar option is however a better way to\n" -" store cookies.\n" -"\n" -" When used on FTP, the ftp server response lines are considered\n" -" being \"headers\" and thus are saved there.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" -e/--referer \n" -" (HTTP) Sends the \"Referer Page\" information to the HTTP server.\n" -" This can also be set with the -H/--header flag of course. When\n" -" used with -L/--location you can append \";auto\" to the --referer\n" -" URL to make curl automatically set the previous URL when it fol-\n" -, stdout); - fputs( -" lows a Location: header. The \";auto\" string can be used alone,\n" -" even if you don't set an initial --referer.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --engine \n" -" Select the OpenSSL crypto engine to use for cipher operations.\n" -" Use --engine list to print a list of build-time supported\n" -" engines. Note that not all (or none) of the engines may be\n" -, stdout); - fputs( -" available at run-time.\n" -"\n" -" --environment\n" -" (RISC OS ONLY) Sets a range of environment variables, using the\n" -" names the -w option supports, to easier allow extraction of use-\n" -" ful information after having run curl.\n" -"\n" -" If this option is used several times, each occurrence will tog-\n" -" gle this on/off.\n" -"\n" -" --egd-file \n" -" (SSL) Specify the path name to the Entropy Gathering Daemon\n" -, stdout); - fputs( -" socket. 
The socket is used to seed the random engine for SSL\n" -" connections. See also the --random-file option.\n" -"\n" -" -E/--cert \n" -" (SSL) Tells curl to use the specified certificate file when get-\n" -" ting a file with HTTPS or FTPS. The certificate must be in PEM\n" -" format. If the optional password isn't specified, it will be\n" -" queried for on the terminal. Note that this option assumes a\n" -, stdout); - fputs( -" \"certificate\" file that is the private key and the private cer-\n" -" tificate concatenated! See --certP and --key to specify them\n" -" independently.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --cert-type \n" -" (SSL) Tells curl what certificate type the provided certificate\n" -" is in. PEM, DER and ENG are recognized types. If not specified,\n" -" PEM is assumed.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" --cacert \n" -" (SSL) Tells curl to use the specified certificate file to verify\n" -" the peer. The file may contain multiple CA certificates. The\n" -" certificate(s) must be in PEM format.\n" -"\n" -" curl recognizes the environment variable named 'CURL_CA_BUNDLE'\n" -" if that is set, and uses the given path as a path to a CA cert\n" -, stdout); - fputs( -" bundle. This option overrides that variable.\n" -"\n" -" The windows version of curl will automatically look for a CA\n" -" certs file named 'curl-ca-bundle.crt', either in the same direc-\n" -" tory as curl.exe, or in the Current Working Directory, or in any\n" -" folder along your PATH.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --capath \n" -, stdout); - fputs( -" (SSL) Tells curl to use the specified certificate directory to\n" -" verify the peer. The certificates must be in PEM format, and the\n" -" directory must have been processed using the c_rehash utility\n" -" supplied with openssl. Using --capath can allow curl to make\n" -" SSL-connections much more efficiently than using --cacert if the\n" -" --cacert file contains many CA certificates.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" -f/--fail\n" -" (HTTP) Fail silently (no output at all) on server errors. This\n" -" is mostly done like this to better enable scripts etc to better\n" -" deal with failed attempts. In normal cases when a HTTP server\n" -" fails to deliver a document, it returns an HTML document stating\n" -" so (which often also describes why and more). This flag will\n" -, stdout); - fputs( -" prevent curl from outputting that and return error 22.\n" -"\n" -" This method is not fail-safe and there are occasions where non-\n" -" succesful response codes will slip through, especially when\n" -" authentication is involved (response codes 401 and 407).\n" -"\n" -" If this option is used twice, the second will again disable\n" -" silent failure.\n" -"\n" -" --ftp-account [data]\n" -, stdout); - fputs( -" (FTP) When an FTP server asks for \"account data\" after user name\n" -" and password has been provided, this data is sent off using the\n" -" ACCT command. (Added in 7.13.0)\n" -"\n" -" If this option is used twice, the second will override the pre-\n" -" vious use.\n" -"\n" -" --ftp-create-dirs\n" -" (FTP) When an FTP URL/operation uses a path that doesn't cur-\n" -" rently exist on the server, the standard behavior of curl is to\n" -, stdout); - fputs( -" fail. 
Using this option, curl will instead attempt to create\n" -" missing directories.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" directory creation.\n" -"\n" -" --ftp-method [method]\n" -" (FTP) Control what method curl should use to reach a file on a\n" -" FTP(S) server. The method argument should be one of the follow-\n" -" ing alternatives:\n" -"\n" -" multicwd\n" -, stdout); - fputs( -" curl does a single CWD operation for each path part in\n" -" the given URL. For deep hierarchies this means very many\n" -" commands. This is how RFC1738 says it should be done.\n" -" This is the default but the slowest behavior.\n" -"\n" -" nocwd curl does no CWD at all. curl will do SIZE, RETR, STOR\n" -" etc and give a full path to the server for all these com-\n" -, stdout); - fputs( -" mands. This is the fastest behavior.\n" -"\n" -" singlecwd\n" -" curl does one CWD with the full target directory and then\n" -" operates on the file \"normally\" (like in the multicwd\n" -" case). This is somewhat more standards compliant than\n" -" 'nocwd' but without the full penalty of 'multicwd'.\n" -"\n" -" --ftp-pasv\n" -" (FTP) Use PASV when transferring. PASV is the internal default\n" -, stdout); - fputs( -" behavior, but using this option can be used to override a previ-\n" -" ous --ftp-port option. (Added in 7.11.0)\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" --ftp-alternative-to-user \n" -" (FTP) If authenticating with the USER and PASS commands fails,\n" -" send this command. When connecting to Tumbleweed's Secure\n" -, stdout); - fputs( -" Transport server over FTPS using a client certificate, using\n" -" \"SITE AUTH\" will tell the server to retrieve the username from\n" -" the certificate. (Added in 7.15.5)\n" -"\n" -" --ftp-skip-pasv-ip\n" -" (FTP) Tell curl to not use the IP address the server suggests in\n" -" its response to curl's PASV command when curl connects the data\n" -" connection. Instead curl will re-use the same IP address it\n" -, stdout); - fputs( -" already uses for the control connection. (Added in 7.14.2)\n" -"\n" -" This option has no effect if PORT, EPRT or EPSV is used instead\n" -" of PASV.\n" -"\n" -" If this option is used twice, the second will again use the\n" -" server's suggested address.\n" -"\n" -" --ftp-ssl\n" -" (FTP) Try to use SSL/TLS for the FTP connection. Reverts to a\n" -" non-secure connection if the server doesn't support SSL/TLS.\n" -, stdout); - fputs( -" See also --ftp-ssl-control and --ftp-ssl-reqd for different lev-\n" -" els of encryption required. (Added in 7.11.0)\n" -"\n" -" If this option is used twice, the second will again disable\n" -" this.\n" -"\n" -" --ftp-ssl-control\n" -" (FTP) Require SSL/TLS for the ftp login, clear for transfer.\n" -" Allows secure authentication, but non-encrypted data transfers\n" -, stdout); - fputs( -" for efficiency. Fails the transfer if the server doesn't sup-\n" -" port SSL/TLS. (Added in 7.16.0)\n" -"\n" -" If this option is used twice, the second will again disable\n" -" this.\n" -"\n" -" --ftp-ssl-reqd\n" -" (FTP) Require SSL/TLS for the FTP connection. Terminates the\n" -" connection if the server doesn't support SSL/TLS. (Added in\n" -" 7.15.5)\n" -"\n" -, stdout); - fputs( -" If this option is used twice, the second will again disable\n" -" this.\n" -"\n" -" -F/--form \n" -" (HTTP) This lets curl emulate a filled in form in which a user\n" -" has pressed the submit button. 
This causes curl to POST data\n" -" using the Content-Type multipart/form-data according to RFC1867.\n" -" This enables uploading of binary files etc. To force the 'con-\n" -, stdout); - fputs( -" tent' part to be a file, prefix the file name with an @ sign. To\n" -" just get the content part from a file, prefix the file name with\n" -" the letter <. The difference between @ and < is then that @\n" -" makes a file get attached in the post as a file upload, while\n" -" the < makes a text field and just get the contents for that text\n" -" field from a file.\n" -"\n" -, stdout); - fputs( -" Example, to send your password file to the server, where 'pass-\n" -" word' is the name of the form-field to which /etc/passwd will be\n" -" the input:\n" -"\n" -" curl -F password=@/etc/passwd www.mypasswords.com\n" -"\n" -" To read the file's content from stdin instead of a file, use -\n" -" where the file name should've been. This goes for both @ and <\n" -" constructs.\n" -"\n" -, stdout); - fputs( -" You can also tell curl what Content-Type to use by using\n" -" 'type=', in a manner similar to:\n" -"\n" -" curl -F \"web=@index.html;type=text/html\" url.com\n" -"\n" -" or\n" -"\n" -" curl -F \"name=daniel;type=text/foo\" url.com\n" -"\n" -" You can also explicitly change the name field of an file upload\n" -" part by setting filename=, like this:\n" -"\n" -" curl -F \"file=@localfile;filename=nameinpost\" url.com\n" -"\n" -, stdout); - fputs( -" See further examples and details in the MANUAL.\n" -"\n" -" This option can be used multiple times.\n" -"\n" -" --form-string \n" -" (HTTP) Similar to --form except that the value string for the\n" -" named parameter is used literally. Leading '@' and '<'\n" -" characters, and the ';type=' string in the value have no special\n" -" meaning. Use this in preference to --form if there's any possi-\n" -, stdout); - fputs( -" bility that the string value may accidentally trigger the '@' or\n" -" '<' features of --form.\n" -"\n" -" -g/--globoff\n" -" This option switches off the \"URL globbing parser\". When you set\n" -" this option, you can specify URLs that contain the letters {}[]\n" -" without having them being interpreted by curl itself. Note that\n" -" these letters are not normal legal URL contents but they should\n" -, stdout); - fputs( -" be encoded according to the URI standard.\n" -"\n" -" -G/--get\n" -" When used, this option will make all data specified with\n" -" -d/--data or --data-binary to be used in a HTTP GET request\n" -" instead of the POST request that otherwise would be used. The\n" -" data will be appended to the URL with a '?' separator.\n" -"\n" -" If used in combination with -I, the POST data will instead be\n" -, stdout); - fputs( -" appended to the URL with a HEAD request.\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" -h/--help\n" -" Usage help.\n" -"\n" -" -H/--header
\n" -" (HTTP) Extra header to use when getting a web page. You may\n" -" specify any number of extra headers. Note that if you should add\n" -" a custom header that has the same name as one of the internal\n" -, stdout); - fputs( -" ones curl would use, your externally set header will be used\n" -" instead of the internal one. This allows you to make even trick-\n" -" ier stuff than curl would normally do. You should not replace\n" -" internally set headers without knowing perfectly well what\n" -" you're doing. Remove an internal header by giving a replacement\n" -" without content on the right side of the colon, as in: -H\n" -" \"Host:\".\n" -"\n" -, stdout); - fputs( -" curl will make sure that each header you add/replace get sent\n" -" with the proper end of line marker, you should thus not add that\n" -" as a part of the header content: do not add newlines or carriage\n" -" returns they will only mess things up for you.\n" -"\n" -" See also the -A/--user-agent and -e/--referer options.\n" -"\n" -" This option can be used multiple times to add/replace/remove\n" -" multiple headers.\n" -"\n" -, stdout); - fputs( -" --ignore-content-length\n" -" (HTTP) Ignore the Content-Length header. This is particularly\n" -" useful for servers running Apache 1.x, which will report incor-\n" -" rect Content-Length for files larger than 2 gigabytes.\n" -"\n" -" -i/--include\n" -" (HTTP) Include the HTTP-header in the output. The HTTP-header\n" -" includes things like server-name, date of the document, HTTP-\n" -" version and more...\n" -"\n" -, stdout); - fputs( -" If this option is used twice, the second will again disable\n" -" header include.\n" -"\n" -" --interface \n" -" Perform an operation using a specified interface. You can enter\n" -" interface name, IP address or host name. An example could look\n" -" like:\n" -"\n" -" curl --interface eth0:1 http://www.netscape.com/\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -I/--head\n" -, stdout); - fputs( -" (HTTP/FTP/FILE) Fetch the HTTP-header only! HTTP-servers feature\n" -" the command HEAD which this uses to get nothing but the header\n" -" of a document. When used on a FTP or FILE file, curl displays\n" -" the file size and last modification time only.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" header only.\n" -"\n" -" -j/--junk-session-cookies\n" -, stdout); - fputs( -" (HTTP) When curl is told to read cookies from a given file, this\n" -" option will make it discard all \"session cookies\". This will\n" -" basically have the same effect as if a new session is started.\n" -" Typical browsers always discard session cookies when they're\n" -" closed down.\n" -"\n" -" If this option is used several times, each occurrence will tog-\n" -" gle this on/off.\n" -"\n" -" -k/--insecure\n" -, stdout); - fputs( -" (SSL) This option explicitly allows curl to perform \"insecure\"\n" -" SSL connections and transfers. All SSL connections are attempted\n" -" to be made secure by using the CA certificate bundle installed\n" -" by default. This makes all connections considered \"insecure\" to\n" -" fail unless -k/--insecure is used.\n" -"\n" -" See this online resource for further details:\n" -" http://curl.haxx.se/docs/sslcerts.html\n" -, stdout); - fputs( -"\n" -" If this option is used twice, the second time will again disable\n" -" it.\n" -"\n" -" --key \n" -" (SSL) Private key file name. 
Allows you to provide your private\n" -" key in this separate file.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --key-type \n" -" (SSL) Private key file type. Specify which type your --key pro-\n" -, stdout); - fputs( -" vided private key is. DER, PEM and ENG are supported. If not\n" -" specified, PEM is assumed.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --krb4 \n" -" (FTP) Enable kerberos4 authentication and use. The level must be\n" -" entered and should be one of 'clear', 'safe', 'confidential' or\n" -" 'private'. Should you use a level that is not one of these,\n" -, stdout); - fputs( -" 'private' will instead be used.\n" -"\n" -" This option requires that the library was built with kerberos4\n" -" support. This is not very common. Use -V/--version to see if\n" -" your curl supports it.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -K/--config \n" -" Specify which config file to read curl arguments from. The con-\n" -, stdout); - fputs( -" fig file is a text file in which command line arguments can be\n" -" written which then will be used as if they were written on the\n" -" actual command line. Options and their parameters must be speci-\n" -" fied on the same config file line. If the parameter is to con-\n" -" tain white spaces, the parameter must be enclosed within quotes.\n" -" If the first column of a config line is a '#' character, the\n" -, stdout); - fputs( -" rest of the line will be treated as a comment.\n" -"\n" -" Specify the filename as '-' to make curl read the file from\n" -" stdin.\n" -"\n" -" Note that to be able to specify a URL in the config file, you\n" -" need to specify it using the --url option, and not by simply\n" -" writing the URL on its own line. So, it could look similar to\n" -" this:\n" -"\n" -" url = \"http://curl.haxx.se/docs/\"\n" -"\n" -, stdout); - fputs( -" This option can be used multiple times.\n" -"\n" -" When curl is invoked, it always (unless -q is used) checks for a\n" -" default config file and uses it if found. The default config\n" -" file is checked for in the following places in this order:\n" -"\n" -" 1) curl tries to find the \"home dir\": It first checks for the\n" -" CURL_HOME and then the HOME environment variables. Failing that,\n" -, stdout); - fputs( -" it uses getpwuid() on unix-like systems (which returns the home\n" -" dir given the current user in your system). On Windows, it then\n" -" checks for the APPDATA variable, or as a last resort the '%USER-\n" -" PROFILE%0lication Data'.\n" -"\n" -" 2) On windows, if there is no _curlrc file in the home dir, it\n" -" checks for one in the same dir the executable curl is placed. On\n" -, stdout); - fputs( -" unix-like systems, it will simply try to load .curlrc from the\n" -" determined home dir.\n" -" --limit-rate \n" -" Specify the maximum transfer rate you want curl to use. This\n" -" feature is useful if you have a limited pipe and you'd like your\n" -" transfer not use your entire bandwidth.\n" -"\n" -" The given speed is measured in bytes/second, unless a suffix is\n" -, stdout); - fputs( -" appended. Appending 'k' or 'K' will count the number as kilo-\n" -" bytes, 'm' or M' makes it megabytes while 'g' or 'G' makes it\n" -" gigabytes. 
Examples: 200K, 3m and 1G.\n" -"\n" -" If you are also using the -Y/--speed-limit option, that option\n" -" will take precedence and might cripple the rate-limiting\n" -" slightly, to help keeping the speed-limit logic working.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" -l/--list-only\n" -" (FTP) When listing an FTP directory, this switch forces a name-\n" -" only view. Especially useful if you want to machine-parse the\n" -" contents of an FTP directory since the normal directory view\n" -" doesn't use a standard look or format.\n" -"\n" -" This option causes an FTP NLST command to be sent. Some FTP\n" -, stdout); - fputs( -" servers list only files in their response to NLST; they do not\n" -" include subdirectories and symbolic links.\n" -"\n" -" If this option is used twice, the second will again disable list\n" -" only.\n" -"\n" -" --local-port [-num]\n" -" Set a prefered number or range of local port numbers to use for\n" -" the connection(s). Note that port numbers by nature is a scarce\n" -, stdout); - fputs( -" resource that will be busy at times so setting this range to\n" -" something too narrow might cause unnecessary connection setup\n" -" failures. (Added in 7.15.2)\n" -"\n" -" -L/--location\n" -" (HTTP/HTTPS) If the server reports that the requested page has\n" -" moved to a different location (indicated with a Location: header\n" -" and a 3XX response code) this option will make curl redo the\n" -, stdout); - fputs( -" request on the new place. If used together with -i/--include or\n" -" -I/--head, headers from all requested pages will be shown. When\n" -" authentication is used, curl only sends its credentials to the\n" -" initial host. If a redirect takes curl to a different host, it\n" -" won't be able to intercept the user+password. See also --loca-\n" -" tion-trusted on how to change this. You can limit the amount of\n" -, stdout); - fputs( -" redirects to follow by using the --max-redirs option.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" location following.\n" -"\n" -" --location-trusted\n" -" (HTTP/HTTPS) Like -L/--location, but will allow sending the name\n" -" + password to all hosts that the site may redirect to. This may\n" -" or may not introduce a security breach if the site redirects you\n" -, stdout); - fputs( -" do a site to which you'll send your authentication info (which\n" -" is plaintext in the case of HTTP Basic authentication).\n" -"\n" -" If this option is used twice, the second will again disable\n" -" location following.\n" -"\n" -" --max-filesize \n" -" Specify the maximum size (in bytes) of a file to download. If\n" -" the file requested is larger than this value, the transfer will\n" -, stdout); - fputs( -" not start and curl will return with exit code 63.\n" -"\n" -" NOTE: The file size is not always known prior to download, and\n" -" for such files this option has no effect even if the file trans-\n" -" fer ends up being larger than this given limit. This concerns\n" -" both FTP and HTTP transfers.\n" -"\n" -" -m/--max-time \n" -" Maximum time in seconds that you allow the whole operation to\n" -, stdout); - fputs( -" take. This is useful for preventing your batch jobs from hang-\n" -" ing for hours due to slow networks or links going down. See\n" -" also the --connect-timeout option.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -M/--manual\n" -" Manual. 
Display the huge help text.\n" -"\n" -" -n/--netrc\n" -" Makes curl scan the .netrc file in the user's home directory for\n" -, stdout); - fputs( -" login name and password. This is typically used for ftp on unix.\n" -" If used with http, curl will enable user authentication. See\n" -" netrc(4) or ftp(1) for details on the file format. Curl will not\n" -" complain if that file hasn't the right permissions (it should\n" -" not be world nor group readable). The environment variable\n" -" \"HOME\" is used to find the home directory.\n" -"\n" -, stdout); - fputs( -" A quick and very simple example of how to setup a .netrc to\n" -" allow curl to ftp to the machine host.domain.com with user name\n" -" 'myself' and password 'secret' should look similar to:\n" -"\n" -" machine host.domain.com login myself password secret\n" -"\n" -" If this option is used twice, the second will again disable\n" -" netrc usage.\n" -"\n" -" --netrc-optional\n" -, stdout); - fputs( -" Very similar to --netrc, but this option makes the .netrc usage\n" -" optional and not mandatory as the --netrc does.\n" -"\n" -" --negotiate\n" -" (HTTP) Enables GSS-Negotiate authentication. The GSS-Negotiate\n" -" method was designed by Microsoft and is used in their web appli-\n" -" cations. It is primarily meant as a support for Kerberos5\n" -" authentication but may be also used along with another authenti-\n" -, stdout); - fputs( -" cation methods. For more information see IETF draft draft-\n" -" brezak-spnego-http-04.txt.\n" -"\n" -" This option requires that the library was built with GSSAPI sup-\n" -" port. This is not very common. Use -V/--version to see if your\n" -" version supports GSS-Negotiate.\n" -"\n" -" When using this option, you must also provide a fake -u/--user\n" -" option to activate the authentication code properly. Sending a\n" -, stdout); - fputs( -" '-u :' is enough as the user name and password from the -u\n" -" option aren't actually used.\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" -N/--no-buffer\n" -" Disables the buffering of the output stream. In normal work sit-\n" -" uations, curl will use a standard buffered output stream that\n" -" will have the effect that it will output the data in chunks, not\n" -, stdout); - fputs( -" necessarily exactly when the data arrives. Using this option\n" -" will disable that buffering.\n" -"\n" -" If this option is used twice, the second will again switch on\n" -" buffering.\n" -"\n" -" --no-sessionid\n" -" (SSL) Disable curl's use of SSL session-ID caching. By default\n" -" all transfers are done using the cache. Note that while nothing\n" -" ever should get hurt by attempting to reuse SSL session-IDs,\n" -, stdout); - fputs( -" there seem to be broken SSL implementations in the wild that may\n" -" require you to disable this in order for you to succeed. (Added\n" -" in 7.16.0)\n" -"\n" -" If this option is used twice, the second will again switch on\n" -" use of the session cache.\n" -"\n" -" --ntlm (HTTP) Enables NTLM authentication. The NTLM authentication\n" -" method was designed by Microsoft and is used by IIS web servers.\n" -, stdout); - fputs( -" It is a proprietary protocol, reversed engineered by clever peo-\n" -" ple and implemented in curl based on their efforts. This kind of\n" -" behavior should not be endorsed, you should encourage everyone\n" -" who uses NTLM to switch to a public and documented authentica-\n" -" tion method instead. 
Such as Digest.\n" -"\n" -" If you want to enable NTLM for your proxy authentication, then\n" -" use --proxy-ntlm.\n" -"\n" -, stdout); - fputs( -" This option requires that the library was built with SSL sup-\n" -" port. Use -V/--version to see if your curl supports NTLM.\n" -"\n" -" If this option is used several times, the following occurrences\n" -" make no difference.\n" -"\n" -" -o/--output \n" -" Write output to instead of stdout. If you are using {} or\n" -" [] to fetch multiple documents, you can use '#' followed by a\n" -, stdout); - fputs( -" number in the specifier. That variable will be replaced\n" -" with the current string for the URL being fetched. Like in:\n" -"\n" -" curl http://{one,two}.site.com -o \"file_#1.txt\"\n" -"\n" -" or use several variables like:\n" -"\n" -" curl http://{site,host}.host[1-5].com -o \"#1_#2\"\n" -"\n" -" You may use this option as many times as you have number of\n" -" URLs.\n" -"\n" -, stdout); - fputs( -" See also the --create-dirs option to create the local directo-\n" -" ries dynamically.\n" -"\n" -" -O/--remote-name\n" -" Write output to a local file named like the remote file we get.\n" -" (Only the file part of the remote file is used, the path is cut\n" -" off.)\n" -"\n" -" The remote file name to use for saving is extracted from the\n" -" given URL, nothing else.\n" -"\n" -, stdout); - fputs( -" You may use this option as many times as you have number of\n" -" URLs.\n" -"\n" -" --pass \n" -" (SSL) Pass phrase for the private key\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --proxy-anyauth\n" -" Tells curl to pick a suitable authentication method when commu-\n" -" nicating with the given proxy. This will cause an extra\n" -" request/response round-trip. (Added in 7.13.2)\n" -"\n" -, stdout); - fputs( -" If this option is used twice, the second will again disable the\n" -" proxy use-any authentication.\n" -"\n" -" --proxy-basic\n" -" Tells curl to use HTTP Basic authentication when communicating\n" -" with the given proxy. Use --basic for enabling HTTP Basic with a\n" -" remote host. Basic is the default authentication method curl\n" -" uses with proxies.\n" -"\n" -" If this option is used twice, the second will again disable\n" -, stdout); - fputs( -" proxy HTTP Basic authentication.\n" -"\n" -" --proxy-digest\n" -" Tells curl to use HTTP Digest authentication when communicating\n" -" with the given proxy. Use --digest for enabling HTTP Digest with\n" -" a remote host.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" proxy HTTP Digest.\n" -"\n" -" --proxy-ntlm\n" -" Tells curl to use HTTP NTLM authentication when communicating\n" -, stdout); - fputs( -" with the given proxy. Use --ntlm for enabling NTLM with a remote\n" -" host.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" proxy HTTP NTLM.\n" -" -p/--proxytunnel\n" -" When an HTTP proxy is used (-x/--proxy), this option will cause\n" -" non-HTTP protocols to attempt to tunnel through the proxy\n" -" instead of merely using it to do HTTP-like operations. The tun-\n" -, stdout); - fputs( -" nel approach is made with the HTTP proxy CONNECT request and\n" -" requires that the proxy allows direct connect to the remote port\n" -" number curl wants to tunnel through to.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" proxy tunnel.\n" -"\n" -" -P/--ftp-port
\n" -" (FTP) Reverses the initiator/listener roles when connecting with\n" -, stdout); - fputs( -" ftp. This switch makes Curl use the PORT command instead of\n" -" PASV. In practice, PORT tells the server to connect to the\n" -" client's specified address and port, while PASV asks the server\n" -" for an ip address and port to connect to.
should be\n" -" one of:\n" -"\n" -" interface\n" -" i.e \"eth0\" to specify which interface's IP address you\n" -" want to use (Unix only)\n" -"\n" -, stdout); - fputs( -" IP address\n" -" i.e \"192.168.10.1\" to specify exact IP number\n" -"\n" -" host name\n" -" i.e \"my.host.domain\" to specify machine\n" -"\n" -" - make curl pick the same IP address that is already used\n" -" for the control connection\n" -"\n" -" If this option is used several times, the last one will be used. Dis-\n" -" able the use of PORT with --ftp-pasv. Disable the attempt to use the\n" -, stdout); - fputs( -" EPRT command instead of PORT by using --disable-eprt. EPRT is really\n" -" PORT++.\n" -"\n" -" -q If used as the first parameter on the command line, the curlrc\n" -" config file will not be read and used. See the -K/--config for\n" -" details on the default config file search path.\n" -"\n" -" -Q/--quote \n" -" (FTP) Send an arbitrary command to the remote FTP server. Quote\n" -, stdout); - fputs( -" commands are sent BEFORE the transfer is taking place (just\n" -" after the initial PWD command to be exact). To make commands\n" -" take place after a successful transfer, prefix them with a dash\n" -" '-'. To make commands get sent after libcurl has changed working\n" -" directory, just before the transfer command(s), prefix the com-\n" -" mand with '+'. You may specify any amount of commands. If the\n" -, stdout); - fputs( -" server returns failure for one of the commands, the entire oper-\n" -" ation will be aborted. You must send syntactically correct FTP\n" -" commands as RFC959 defines.\n" -"\n" -" This option can be used multiple times.\n" -"\n" -" --random-file \n" -" (SSL) Specify the path name to file containing what will be con-\n" -" sidered as random data. The data is used to seed the random\n" -, stdout); - fputs( -" engine for SSL connections. See also the --egd-file option.\n" -"\n" -" -r/--range \n" -" (HTTP/FTP) Retrieve a byte range (i.e a partial document) from a\n" -" HTTP/1.1 or FTP server. Ranges can be specified in a number of\n" -" ways.\n" -"\n" -" 0-499 specifies the first 500 bytes\n" -"\n" -" 500-999 specifies the second 500 bytes\n" -"\n" -" -500 specifies the last 500 bytes\n" -"\n" -, stdout); - fputs( -" 9500- specifies the bytes from offset 9500 and forward\n" -"\n" -" 0-0,-1 specifies the first and last byte only(*)(H)\n" -"\n" -" 500-700,600-799\n" -" specifies 300 bytes from offset 500(H)\n" -"\n" -" 100-199,500-599\n" -" specifies two separate 100 bytes ranges(*)(H)\n" -"\n" -" (*) = NOTE that this will cause the server to reply with a multipart\n" -" response!\n" -"\n" -, stdout); - fputs( -" You should also be aware that many HTTP/1.1 servers do not have this\n" -" feature enabled, so that when you attempt to get a range, you'll\n" -" instead get the whole document.\n" -"\n" -" FTP range downloads only support the simple syntax 'start-stop'\n" -" (optionally with one of the numbers omitted). 
It depends on the non-RFC\n" -" command SIZE.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -R/--remote-time\n" -, stdout); - fputs( -" When used, this will make libcurl attempt to figure out the\n" -" timestamp of the remote file, and if that is available make the\n" -" local file get that same timestamp.\n" -"\n" -" If this option is used twice, the second time disables this\n" -" again.\n" -"\n" -" --retry \n" -" If a transient error is returned when curl tries to perform a\n" -" transfer, it will retry this number of times before giving up.\n" -, stdout); - fputs( -" Setting the number to 0 makes curl do no retries (which is the\n" -" default). Transient error means either: a timeout, an FTP 5xx\n" -" response code or an HTTP 5xx response code.\n" -"\n" -" When curl is about to retry a transfer, it will first wait one\n" -" second and then for all forthcoming retries it will double the\n" -" waiting time until it reaches 10 minutes which then will be the\n" -, stdout); - fputs( -" delay between the rest of the retries. By using --retry-delay\n" -" you disable this exponential backoff algorithm. See also\n" -" --retry-max-time to limit the total time allowed for retries.\n" -" (Added in 7.12.3)\n" -"\n" -" If this option is used multiple times, the last occurrence\n" -" decide the amount.\n" -"\n" -" --retry-delay \n" -" Make curl sleep this amount of time between each retry when a\n" -, stdout); - fputs( -" transfer has failed with a transient error (it changes the\n" -" default backoff time algorithm between retries). This option is\n" -" only interesting if --retry is also used. Setting this delay to\n" -" zero will make curl use the default backoff time. (Added in\n" -" 7.12.3)\n" -"\n" -" If this option is used multiple times, the last occurrence\n" -" decide the amount.\n" -"\n" -" --retry-max-time \n" -, stdout); - fputs( -" The retry timer is reset before the first transfer attempt.\n" -" Retries will be done as usual (see --retry) as long as the timer\n" -" hasn't reached this given limit. Notice that if the timer hasn't\n" -" reached the limit, the request will be made and while perform-\n" -" ing, it may take longer than this given time period. To limit a\n" -" single request's maximum time, use -m/--max-time. Set this\n" -, stdout); - fputs( -" option to zero to not timeout retries. (Added in 7.12.3)\n" -"\n" -" If this option is used multiple times, the last occurrence\n" -" decide the amount.\n" -"\n" -" -s/--silent\n" -" Silent mode. Don't show progress meter or error messages. Makes\n" -" Curl mute.\n" -"\n" -" If this option is used twice, the second will again disable\n" -" silent mode.\n" -"\n" -" -S/--show-error\n" -, stdout); - fputs( -" When used with -s it makes curl show error message if it fails.\n" -" If this option is used twice, the second will again disable show\n" -" error.\n" -"\n" -" --socks4 \n" -" Use the specified SOCKS4 proxy. If the port number is not speci-\n" -" fied, it is assumed at port 1080. (Added in 7.15.2)\n" -"\n" -" This option overrides any previous use of -x/--proxy, as they\n" -" are mutually exclusive.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" --socks5 \n" -" Use the specified SOCKS5 proxy. If the port number is not speci-\n" -" fied, it is assumed at port 1080. 
(Added in 7.11.1)\n" -"\n" -" This option overrides any previous use of -x/--proxy, as they\n" -" are mutually exclusive.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -, stdout); - fputs( -" (This option was previously wrongly documented and used as\n" -" --socks without the number appended.)\n" -"\n" -" --stderr \n" -" Redirect all writes to stderr to the specified file instead. If\n" -" the file name is a plain '-', it is instead written to stdout.\n" -" This option has no point when you're using a shell with decent\n" -" redirecting capabilities.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" --tcp-nodelay\n" -" Turn on the TCP_NODELAY option. See the curl_easy_setopt(3) man\n" -" page for details about this option. (Added in 7.11.2)\n" -"\n" -" If this option is used several times, each occurrence toggles\n" -" this on/off.\n" -"\n" -" -t/--telnet-option \n" -" Pass options to the telnet protocol. Supported options are:\n" -"\n" -, stdout); - fputs( -" TTYPE= Sets the terminal type.\n" -"\n" -" XDISPLOC= Sets the X display location.\n" -"\n" -" NEW_ENV= Sets an environment variable.\n" -"\n" -" -T/--upload-file \n" -" This transfers the specified local file to the remote URL. If\n" -" there is no file part in the specified URL, Curl will append the\n" -" local file name. NOTE that you must use a trailing / on the last\n" -, stdout); - fputs( -" directory to really prove to Curl that there is no file name or\n" -" curl will think that your last directory name is the remote file\n" -" name to use. That will most likely cause the upload operation to\n" -" fail. If this is used on a http(s) server, the PUT command will\n" -" be used.\n" -"\n" -" Use the file name \"-\" (a single dash) to use stdin instead of a\n" -" given file.\n" -"\n" -, stdout); - fputs( -" You can specify one -T for each URL on the command line. Each -T\n" -" + URL pair specifies what to upload and to where. curl also sup-\n" -" ports \"globbing\" of the -T argument, meaning that you can upload\n" -" multiple files to a single URL by using the same URL globbing\n" -" style supported in the URL, like this:\n" -"\n" -" curl -T \"{file1,file2}\" http://www.uploadtothissite.com\n" -"\n" -" or even\n" -"\n" -, stdout); - fputs( -" curl -T \"img[1-1000].png\" ftp://ftp.picturemania.com/upload/\n" -"\n" -" --trace \n" -" Enables a full trace dump of all incoming and outgoing data,\n" -" including descriptive information, to the given output file. Use\n" -" \"-\" as filename to have the output sent to stdout.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --trace-ascii \n" -, stdout); - fputs( -" Enables a full trace dump of all incoming and outgoing data,\n" -" including descriptive information, to the given output file. Use\n" -" \"-\" as filename to have the output sent to stdout.\n" -"\n" -" This is very similar to --trace, but leaves out the hex part and\n" -" only shows the ASCII part of the dump. It makes smaller output\n" -" that might be easier to read for untrained humans.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" --trace-time\n" -" Prepends a time stamp to each trace or verbose line that curl\n" -" displays. 
(Added in 7.14.0)\n" -"\n" -" If this option is used several times, each occurrence will tog-\n" -" gle it on/off.\n" -"\n" -" -u/--user \n" -" Specify user and password to use for server authentication.\n" -, stdout); - fputs( -" Overrides -n/--netrc and --netrc-optional.\n" -"\n" -" If you use an SSPI-enabled curl binary and do NTLM autentica-\n" -" tion, you can force curl to pick up the user name and password\n" -" from your environment by simply specifying a single colon with\n" -" this option: \"-u :\".\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -U/--proxy-user \n" -, stdout); - fputs( -" Specify user and password to use for proxy authentication.\n" -"\n" -" If you use an SSPI-enabled curl binary and do NTLM autentica-\n" -" tion, you can force curl to pick up the user name and password\n" -" from your environment by simply specifying a single colon with\n" -" this option: \"-U :\".\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" --url \n" -, stdout); - fputs( -" Specify a URL to fetch. This option is mostly handy when you\n" -" want to specify URL(s) in a config file.\n" -"\n" -" This option may be used any number of times. To control where\n" -" this URL is written, use the -o/--output or the -O/--remote-name\n" -" options.\n" -"\n" -" -v/--verbose\n" -" Makes the fetching more verbose/talkative. Mostly usable for\n" -" debugging. Lines starting with '>' means \"header data\" sent by\n" -, stdout); - fputs( -" curl, '<' means \"header data\" received by curl that is hidden in\n" -" normal cases and lines starting with '*' means additional info\n" -" provided by curl.\n" -"\n" -" Note that if you only want HTTP headers in the output,\n" -" -i/--include might be option you're looking for.\n" -"\n" -" If you think this option still doesn't give you enough details,\n" -" consider using --trace or --trace-ascii instead.\n" -"\n" -, stdout); - fputs( -" If this option is used twice, the second will again disable ver-\n" -" bose.\n" -"\n" -" -V/--version\n" -" Displays information about curl and the libcurl version it uses.\n" -" The first line includes the full version of curl, libcurl and\n" -" other 3rd party libraries linked with the executable.\n" -"\n" -" The second line (starts with \"Protocols:\") shows all protocols\n" -" that libcurl reports to support.\n" -"\n" -, stdout); - fputs( -" The third line (starts with \"Features:\") shows specific features\n" -" libcurl reports to offer. Available features include:\n" -"\n" -" IPv6 You can use IPv6 with this.\n" -"\n" -" krb4 Krb4 for ftp is supported.\n" -"\n" -" SSL HTTPS and FTPS are supported.\n" -"\n" -" libz Automatic decompression of compressed files over HTTP is\n" -" supported.\n" -"\n" -" NTLM NTLM authentication is supported.\n" -"\n" -" GSS-Negotiate\n" -, stdout); - fputs( -" Negotiate authentication is supported.\n" -"\n" -" Debug This curl uses a libcurl built with Debug. This enables\n" -" more error-tracking and memory debugging etc. For curl-\n" -" developers only!\n" -"\n" -" AsynchDNS\n" -" This curl uses asynchronous name resolves.\n" -"\n" -" SPNEGO SPNEGO Negotiate authentication is supported.\n" -"\n" -" Largefile\n" -, stdout); - fputs( -" This curl supports transfers of large files, files larger\n" -" than 2GB.\n" -"\n" -" IDN This curl supports IDN - international domain names.\n" -"\n" -" SSPI SSPI is supported. 
If you use NTLM and set a blank user\n" -" name, curl will authenticate with your current user and\n" -" password.\n" -"\n" -" -w/--write-out \n" -" Defines what to display on stdout after a completed and success-\n" -, stdout); - fputs( -" ful operation. The format is a string that may contain plain\n" -" text mixed with any number of variables. The string can be spec-\n" -" ified as \"string\", to get read from a particular file you spec-\n" -" ify it \"@filename\" and to tell curl to read the format from\n" -" stdin you write \"@-\".\n" -"\n" -" The variables present in the output format will be substituted\n" -, stdout); - fputs( -" by the value or text that curl thinks fit, as described below.\n" -" All variables are specified like %{variable_name} and to output\n" -" a normal % you just write them like %%. You can output a newline\n" -" by using \\n, a carriage return with \\r and a tab space with \\t.\n" -" NOTE: The %-letter is a special letter in the win32-environment,\n" -" where all occurrences of % must be doubled when using this\n" -" option.\n" -"\n" -, stdout); - fputs( -" Available variables are at this point:\n" -"\n" -" url_effective The URL that was fetched last. This is mostly\n" -" meaningful if you've told curl to follow loca-\n" -" tion: headers.\n" -"\n" -" http_code The numerical code that was found in the last\n" -" retrieved HTTP(S) page.\n" -"\n" -" http_connect The numerical code that was found in the last\n" -, stdout); - fputs( -" response (from a proxy) to a curl CONNECT\n" -" request. (Added in 7.12.4)\n" -"\n" -" time_total The total time, in seconds, that the full opera-\n" -" tion lasted. The time will be displayed with mil-\n" -" lisecond resolution.\n" -"\n" -" time_namelookup\n" -" The time, in seconds, it took from the start\n" -, stdout); - fputs( -" until the name resolving was completed.\n" -"\n" -" time_connect The time, in seconds, it took from the start\n" -" until the connect to the remote host (or proxy)\n" -" was completed.\n" -"\n" -" time_pretransfer\n" -" The time, in seconds, it took from the start\n" -" until the file transfer is just about to begin.\n" -, stdout); - fputs( -" This includes all pre-transfer commands and nego-\n" -" tiations that are specific to the particular pro-\n" -" tocol(s) involved.\n" -"\n" -" time_redirect The time, in seconds, it took for all redirection\n" -" steps include name lookup, connect, pretransfer\n" -" and transfer before final transaction was\n" -, stdout); - fputs( -" started. time_redirect shows the complete execu-\n" -" tion time for multiple redirections. (Added in\n" -" 7.12.3)\n" -"\n" -" time_starttransfer\n" -" The time, in seconds, it took from the start\n" -" until the first byte is just about to be trans-\n" -" ferred. 
This includes time_pretransfer and also\n" -, stdout); - fputs( -" the time the server needs to calculate the\n" -" result.\n" -"\n" -" size_download The total amount of bytes that were downloaded.\n" -"\n" -" size_upload The total amount of bytes that were uploaded.\n" -"\n" -" size_header The total amount of bytes of the downloaded head-\n" -" ers.\n" -"\n" -" size_request The total amount of bytes that were sent in the\n" -, stdout); - fputs( -" HTTP request.\n" -"\n" -" speed_download The average download speed that curl measured for\n" -" the complete download.\n" -"\n" -" speed_upload The average upload speed that curl measured for\n" -" the complete upload.\n" -"\n" -" content_type The Content-Type of the requested document, if\n" -" there was any.\n" -"\n" -, stdout); - fputs( -" num_connects Number of new connects made in the recent trans-\n" -" fer. (Added in 7.12.3)\n" -"\n" -" num_redirects Number of redirects that were followed in the\n" -" request. (Added in 7.12.3)\n" -"\n" -" ftp_entry_path The initial path libcurl ended up in when logging\n" -" on to the remote FTP server. (Added in 7.15.4)\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -, stdout); - fputs( -" -x/--proxy \n" -" Use specified HTTP proxy. If the port number is not specified,\n" -" it is assumed at port 1080.\n" -"\n" -" This option overrides existing environment variables that sets\n" -" proxy to use. If there's an environment variable setting a\n" -" proxy, you can set proxy to \"\" to override it.\n" -"\n" -" Note that all operations that are performed over a HTTP proxy\n" -, stdout); - fputs( -" will transparently be converted to HTTP. It means that certain\n" -" protocol specific operations might not be available. This is not\n" -" the case if you can tunnel through the proxy, as done with the\n" -" -p/--proxytunnel option.\n" -"\n" -" Starting with 7.14.1, the proxy host can be specified the exact\n" -" same way as the proxy environment variables, include protocol\n" -" prefix (http://) and embedded user + password.\n" -"\n" -, stdout); - fputs( -" If this option is used several times, the last one will be used.\n" -"\n" -" -X/--request \n" -" (HTTP) Specifies a custom request method to use when communicat-\n" -" ing with the HTTP server. The specified request will be used\n" -" instead of the method otherwise used (which defaults to GET).\n" -" Read the HTTP 1.1 specification for details and explanations.\n" -"\n" -, stdout); - fputs( -" (FTP) Specifies a custom FTP command to use instead of LIST when\n" -" doing file lists with ftp.\n" -"\n" -" If this option is used several times, the last one will be used.\n" -"\n" -" -y/--speed-time