Compare commits


No commits in common. "main" and "curl-7_21_5" have entirely different histories.

2126 changed files with 54938 additions and 220728 deletions

.dir-locals.el

@@ -1,10 +0,0 @@
;;; Directory Local Variables
;;; See Info node `(emacs) Directory Variables' for more information.
((nil . ((indent-tabs-mode . nil)
(show-trailing-whitespace . t)))
(c-mode . ((c-basic-offset . 2)
))
(c++-mode . ((c-basic-offset . 2)
))
)

.gitattributes vendored

@@ -1,5 +1 @@
*.dsw -crlf
buildconf eol=lf
configure.ac eol=lf
*.m4 eol=lf
*.in eol=lf

.github/CONTRIBUTING.md

@@ -1,23 +0,0 @@
How to contribute to curl
=========================
Join the community
------------------
1. Click 'watch' on the github repo
2. Subscribe to the suitable [mailing lists](https://curl.haxx.se/mail/)
Read [CONTRIBUTE](../docs/CONTRIBUTE)
---------------------------------------
Send your suggestions using one of these methods:
-------------------------------------------------
1. in a mail to the mailing list
2. as a [pull request](https://github.com/curl/curl/pulls)
3. as an [issue](https://github.com/curl/curl/issues)
/ The cURL team!


@@ -1,9 +0,0 @@
### I did this
### I expected the following
### curl/libcurl version
[curl -V output perhaps?]
### operating system

.gitignore vendored

@@ -1,52 +1,43 @@
*.asc
.deps
.libs
*.lib
*.pdb
*.dll
*.exe
*.exp
*.la
*.lib
*.lo
*.o
*.obj
*.pdb
*~
*.asc
.*.swp
.cproject
.deps
.dirstamp
.libs
.project
.settings
/build/
/builds/
CHANGES.dist
Debug
INSTALL
Release
*.exp
Makefile
Makefile.in
Release
TAGS
aclocal.m4
aclocal.m4.bak
autom4te.cache
compile
config.cache
config.guess
config.log
config.status
config.sub
configure
curl-*.tar.bz2
curl-*.tar.gz
curl-*.tar.lzma
curl-*.zip
curl-config
depcomp
install-sh
libcurl.pc
libtool
ltmain.sh
compile
curl-config
libcurl.pc
missing
curl-*.tar.gz
curl-*.tar.bz2
curl-*.tar.lzma
curl-*.zip
INSTALL
install-sh
*.o
*.lo
*.la
mkinstalldirs
tags
test-driver
scripts/_curl
TAGS
*~
aclocal.m4.bak
CHANGES.dist

.travis.yml

@@ -1,23 +0,0 @@
os:
- linux
- osx
sudo: false
language: c
install:
- if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew update > /dev/null; fi
- if [ "$TRAVIS_OS_NAME" == "osx" ]; then brew install openssl libidn rtmpdump libssh2 c-ares libmetalink libressl nghttp2; fi
before_script:
- ./buildconf
script: ./configure --enable-debug && make && make test-full
compiler:
- clang
- gcc
notifications:
email: false

Android.mk

@@ -1,13 +1,8 @@
# Google Android makefile for curl and libcurl
#
# This file can be used when building curl using the full Android source
# release or the NDK. Most users do not want or need to do this; please
# instead read the Android section in docs/INSTALL for alternate
# methods.
#
# Place the curl source (including this makefile) into external/curl/ in the
# Android source tree. Then build them with 'make curl' or just 'make libcurl'
# from the Android root. Tested with Android versions 1.5, 2.1-2.3
# from the Android root. Tested with Android 1.5 and 2.1
#
# Note: you must first create a curl_config.h file by running configure in the
# Android environment. The only way I've found to do this is tricky. Perform a
@@ -47,9 +42,9 @@
# into the right place (but see the note about this below).
#
# Dan Fandrich
# November 2011
# August 2010
LOCAL_PATH:= $(call my-dir)/../..
LOCAL_PATH:= $(call my-dir)
common_CFLAGS := -Wpointer-arith -Wwrite-strings -Wunused -Winline -Wnested-externs -Wmissing-declarations -Wmissing-prototypes -Wno-long-long -Wfloat-equal -Wno-multichar -Wsign-compare -Wno-format-nonliteral -Wendif-labels -Wstrict-prototypes -Wdeclaration-after-statement -Wno-system-headers -DHAVE_CONFIG_H
@@ -67,7 +62,8 @@ CURL_HEADERS := \
mprintf.h \
multi.h \
stdcheaders.h \
typecheck-gcc.h
typecheck-gcc.h \
types.h
LOCAL_SRC_FILES := $(addprefix lib/,$(CSOURCES))
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include/
@@ -77,7 +73,6 @@ LOCAL_COPY_HEADERS_TO := libcurl/curl
LOCAL_COPY_HEADERS := $(addprefix include/curl/,$(CURL_HEADERS))
LOCAL_MODULE:= libcurl
LOCAL_MODULE_TAGS := optional
# Copy the licence to a place where Android will find it.
# Actually, this doesn't quite work because the build system searches
@@ -98,13 +93,12 @@ include $(LOCAL_PATH)/src/Makefile.inc
LOCAL_SRC_FILES := $(addprefix src/,$(CURL_CFILES))
LOCAL_MODULE := curl
LOCAL_MODULE_TAGS := optional
LOCAL_STATIC_LIBRARIES := libcurl
LOCAL_SYSTEM_SHARED_LIBRARIES := libc
LOCAL_C_INCLUDES += $(LOCAL_PATH)/include $(LOCAL_PATH)/lib
# This may also need to include $(CURLX_CFILES) in order to correctly link
# This may also need to include $(CURLX_ONES) in order to correctly link
# if libcurl is changed to be built as a dynamic library
LOCAL_CFLAGS += $(common_CFLAGS)

CHANGES

@@ -1,7 +1,15 @@
See https://curl.haxx.se/changes.html for the edited and human readable online
version of what has changed over the years in different curl releases.
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
Generate a CHANGES file like the one present in evey release like this:
Changelog
This file no longer holds the changelog. Now you can generate it yourself
like this:
$ git log --pretty=fuller --no-color --date=short --decorate=full | \
./log2changes.pl
The older, manually edited, changelog is found in git named CHANGES.0

CHANGES.0

File diff suppressed because it is too large.

CMake/CurlCheckCSourceCompiles.cmake

@@ -0,0 +1,75 @@
# - Check if the source code provided in the SOURCE argument compiles.
# CURL_CHECK_C_SOURCE_COMPILES(SOURCE VAR)
# - macro which checks if the source code compiles
# SOURCE - source code to try to compile
# VAR - variable to store whether the source code compiled
#
# The following variables may be set before calling this macro to
# modify the way the check is run:
#
# CMAKE_REQUIRED_FLAGS = string of compile command line flags
# CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar)
# CMAKE_REQUIRED_INCLUDES = list of include directories
# CMAKE_REQUIRED_LIBRARIES = list of libraries to link
macro(CURL_CHECK_C_SOURCE_COMPILES SOURCE VAR)
if("${VAR}" MATCHES "^${VAR}$" OR "${VAR}" MATCHES "UNKNOWN")
set(message "${VAR}")
# If the number of arguments is greater than 2 (SOURCE VAR)
if(${ARGC} GREATER 2)
# then add the third argument as a message
set(message "${ARGV2} (${VAR})")
endif(${ARGC} GREATER 2)
set(MACRO_CHECK_FUNCTION_DEFINITIONS
"-D${VAR} ${CMAKE_REQUIRED_FLAGS}")
if(CMAKE_REQUIRED_LIBRARIES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
else(CMAKE_REQUIRED_LIBRARIES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES)
endif(CMAKE_REQUIRED_LIBRARIES)
if(CMAKE_REQUIRED_INCLUDES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_INCLUDES
"-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}")
else(CMAKE_REQUIRED_INCLUDES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_INCLUDES)
endif(CMAKE_REQUIRED_INCLUDES)
set(src "")
foreach(def ${EXTRA_DEFINES})
set(src "${src}#define ${def} 1\n")
endforeach(def)
foreach(inc ${HEADER_INCLUDES})
set(src "${src}#include <${inc}>\n")
endforeach(inc)
set(src "${src}\nint main() { ${SOURCE} ; return 0; }")
set(CMAKE_CONFIGURABLE_FILE_CONTENT "${src}")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/CMake/CMakeConfigurableFile.in
"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/src.c"
IMMEDIATE)
message(STATUS "Performing Test ${message}")
try_compile(${VAR}
${CMAKE_BINARY_DIR}
${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/src.c
COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS}
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_FUNCTION_DEFINITIONS}
"${CURL_CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES}"
"${CURL_CHECK_C_SOURCE_COMPILES_ADD_INCLUDES}"
OUTPUT_VARIABLE OUTPUT)
if(${VAR})
set(${VAR} 1 CACHE INTERNAL "Test ${message}")
message(STATUS "Performing Test ${message} - Success")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
"Performing C SOURCE FILE Test ${message} succeded with the following output:\n"
"${OUTPUT}\n"
"Source file was:\n${src}\n")
else(${VAR})
message(STATUS "Performing Test ${message} - Failed")
set(${VAR} "" CACHE INTERNAL "Test ${message}")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Performing C SOURCE FILE Test ${message} failed with the following output:\n"
"${OUTPUT}\n"
"Source file was:\n${src}\n")
endif(${VAR})
endif("${VAR}" MATCHES "^${VAR}$" OR "${VAR}" MATCHES "UNKNOWN")
endmacro(CURL_CHECK_C_SOURCE_COMPILES)
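As a usage sketch (the header list, the test fragment and the HAVE_SOCKET_PROTO result variable below are illustrative, not taken from this compare): the macro wraps the SOURCE fragment in a main() body, prefixed by one #define per entry in EXTRA_DEFINES and one #include per entry in HEADER_INCLUDES.

# hypothetical caller of the macro defined above
set(EXTRA_DEFINES)                                   # optional feature-test macros, emitted as #define ... 1
set(HEADER_INCLUDES "sys/types.h" "sys/socket.h")    # emitted as #include <...>
curl_check_c_source_compiles("socket(0, 0, 0)" HAVE_SOCKET_PROTO
                             "socket() is declared")
if(HAVE_SOCKET_PROTO)
  message(STATUS "socket() looks usable")
endif()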

CMake/CurlCheckCSourceRuns.cmake

@@ -0,0 +1,83 @@
# - Check if the source code provided in the SOURCE argument compiles and runs.
# CURL_CHECK_C_SOURCE_RUNS(SOURCE VAR)
# - macro which checks if the source code runs
# SOURCE - source code to try to compile
# VAR - variable to store size if the type exists.
#
# The following variables may be set before calling this macro to
# modify the way the check is run:
#
# CMAKE_REQUIRED_FLAGS = string of compile command line flags
# CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar)
# CMAKE_REQUIRED_INCLUDES = list of include directories
# CMAKE_REQUIRED_LIBRARIES = list of libraries to link
macro(CURL_CHECK_C_SOURCE_RUNS SOURCE VAR)
if("${VAR}" MATCHES "^${VAR}$" OR "${VAR}" MATCHES "UNKNOWN")
set(message "${VAR}")
# If the number of arguments is greater than 2 (SOURCE VAR)
if(${ARGC} GREATER 2)
# then add the third argument as a message
set(message "${ARGV2} (${VAR})")
endif(${ARGC} GREATER 2)
set(MACRO_CHECK_FUNCTION_DEFINITIONS
"-D${VAR} ${CMAKE_REQUIRED_FLAGS}")
if(CMAKE_REQUIRED_LIBRARIES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
else(CMAKE_REQUIRED_LIBRARIES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES)
endif(CMAKE_REQUIRED_LIBRARIES)
if(CMAKE_REQUIRED_INCLUDES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_INCLUDES
"-DINCLUDE_DIRECTORIES:STRING=${CMAKE_REQUIRED_INCLUDES}")
else(CMAKE_REQUIRED_INCLUDES)
set(CURL_CHECK_C_SOURCE_COMPILES_ADD_INCLUDES)
endif(CMAKE_REQUIRED_INCLUDES)
set(src "")
foreach(def ${EXTRA_DEFINES})
set(src "${src}#define ${def} 1\n")
endforeach(def)
foreach(inc ${HEADER_INCLUDES})
set(src "${src}#include <${inc}>\n")
endforeach(inc)
set(src "${src}\nint main() { ${SOURCE} ; return 0; }")
set(CMAKE_CONFIGURABLE_FILE_CONTENT "${src}")
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/CMake/CMakeConfigurableFile.in
"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/src.c"
IMMEDIATE)
message(STATUS "Performing Test ${message}")
try_run(${VAR} ${VAR}_COMPILED
${CMAKE_BINARY_DIR}
${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeTmp/src.c
COMPILE_DEFINITIONS ${CMAKE_REQUIRED_DEFINITIONS}
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_FUNCTION_DEFINITIONS}
"${CURL_CHECK_C_SOURCE_COMPILES_ADD_LIBRARIES}"
"${CURL_CHECK_C_SOURCE_COMPILES_ADD_INCLUDES}"
OUTPUT_VARIABLE OUTPUT)
# if it did not compile make the return value fail code of 1
if(NOT ${VAR}_COMPILED)
set(${VAR} 1)
endif(NOT ${VAR}_COMPILED)
# if the return value was 0 then it worked
set(result_var ${${VAR}})
if("${result_var}" EQUAL 0)
set(${VAR} 1 CACHE INTERNAL "Test ${message}")
message(STATUS "Performing Test ${message} - Success")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
"Performing C SOURCE FILE Test ${message} succeded with the following output:\n"
"${OUTPUT}\n"
"Return value: ${${VAR}}\n"
"Source file was:\n${src}\n")
else("${result_var}" EQUAL 0)
message(STATUS "Performing Test ${message} - Failed")
set(${VAR} "" CACHE INTERNAL "Test ${message}")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Performing C SOURCE FILE Test ${message} failed with the following output:\n"
"${OUTPUT}\n"
"Return value: ${result_var}\n"
"Source file was:\n${src}\n")
endif("${result_var}" EQUAL 0)
endif("${VAR}" MATCHES "^${VAR}$" OR "${VAR}" MATCHES "UNKNOWN")
endmacro(CURL_CHECK_C_SOURCE_RUNS)
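The _RUNS variant follows the same calling convention but also executes the test binary, so its result is only meaningful for native (non-cross) builds. A minimal sketch with illustrative names (HAVE_WORKING_GETPID is not a variable from this compare):

set(EXTRA_DEFINES)
set(HEADER_INCLUDES "unistd.h")
curl_check_c_source_runs("if(getpid() <= 0) return 1" HAVE_WORKING_GETPID)
# HAVE_WORKING_GETPID ends up set only when the program both compiles and exits with 0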

CMake/CurlTests.c

@@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2014, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@@ -71,88 +71,264 @@ main ()
}
#endif
/* tests for gethostbyaddr_r or gethostbyname_r */
#if defined(HAVE_GETHOSTBYADDR_R_5_REENTRANT) || \
defined(HAVE_GETHOSTBYADDR_R_7_REENTRANT) || \
defined(HAVE_GETHOSTBYADDR_R_8_REENTRANT) || \
defined(HAVE_GETHOSTBYNAME_R_3_REENTRANT) || \
defined(HAVE_GETHOSTBYNAME_R_5_REENTRANT) || \
defined(HAVE_GETHOSTBYNAME_R_6_REENTRANT)
# define _REENTRANT
/* no idea whether _REENTRANT is always set, just invent a new flag */
# define TEST_GETHOSTBYFOO_REENTRANT
#endif
#if defined(HAVE_GETHOSTBYADDR_R_5) || \
defined(HAVE_GETHOSTBYADDR_R_7) || \
defined(HAVE_GETHOSTBYADDR_R_8) || \
defined(HAVE_GETHOSTBYNAME_R_3) || \
defined(HAVE_GETHOSTBYNAME_R_5) || \
defined(HAVE_GETHOSTBYNAME_R_6) || \
defined(TEST_GETHOSTBYFOO_REENTRANT)
#ifdef HAVE_GETHOSTBYADDR_R_5
#include <sys/types.h>
#include <netdb.h>
int main(void)
int
main ()
{
char *address = "example.com";
int length = 0;
int type = 0;
struct hostent h;
int rc = 0;
#if defined(HAVE_GETHOSTBYADDR_R_5) || \
defined(HAVE_GETHOSTBYADDR_R_5_REENTRANT) || \
\
defined(HAVE_GETHOSTBYNAME_R_3) || \
defined(HAVE_GETHOSTBYNAME_R_3_REENTRANT)
struct hostent_data hdata;
#elif defined(HAVE_GETHOSTBYADDR_R_7) || \
defined(HAVE_GETHOSTBYADDR_R_7_REENTRANT) || \
defined(HAVE_GETHOSTBYADDR_R_8) || \
defined(HAVE_GETHOSTBYADDR_R_8_REENTRANT) || \
\
defined(HAVE_GETHOSTBYNAME_R_5) || \
defined(HAVE_GETHOSTBYNAME_R_5_REENTRANT) || \
defined(HAVE_GETHOSTBYNAME_R_6) || \
defined(HAVE_GETHOSTBYNAME_R_6_REENTRANT)
char buffer[8192];
int h_errnop;
struct hostent *hp;
char * address;
int length;
int type;
struct hostent h;
struct hostent_data hdata;
int rc;
#ifndef gethostbyaddr_r
(void)gethostbyaddr_r;
#endif
rc = gethostbyaddr_r(address, length, type, &h, &hdata);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYADDR_R_5_REENTRANT
#define _REENTRANT
#include <sys/types.h>
#include <netdb.h>
int
main ()
{
char * address;
int length;
int type;
struct hostent h;
struct hostent_data hdata;
int rc;
#ifndef gethostbyaddr_r
(void)gethostbyaddr_r;
#endif
rc = gethostbyaddr_r(address, length, type, &h, &hdata);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYADDR_R_7
#include <sys/types.h>
#include <netdb.h>
int
main ()
{
char * address;
int length;
int type;
struct hostent h;
char buffer[8192];
int h_errnop;
struct hostent * hp;
#ifndef gethostbyaddr_r
(void)gethostbyaddr_r;
#endif
#if defined(HAVE_GETHOSTBYADDR_R_5) || \
defined(HAVE_GETHOSTBYADDR_R_5_REENTRANT)
rc = gethostbyaddr_r(address, length, type, &h, &hdata);
#elif defined(HAVE_GETHOSTBYADDR_R_7) || \
defined(HAVE_GETHOSTBYADDR_R_7_REENTRANT)
hp = gethostbyaddr_r(address, length, type, &h, buffer, 8192, &h_errnop);
(void)hp;
#elif defined(HAVE_GETHOSTBYADDR_R_8) || \
defined(HAVE_GETHOSTBYADDR_R_8_REENTRANT)
rc = gethostbyaddr_r(address, length, type, &h, buffer, 8192, &hp, &h_errnop);
#endif
#if defined(HAVE_GETHOSTBYNAME_R_3) || \
defined(HAVE_GETHOSTBYNAME_R_3_REENTRANT)
rc = gethostbyname_r(address, &h, &hdata);
#elif defined(HAVE_GETHOSTBYNAME_R_5) || \
defined(HAVE_GETHOSTBYNAME_R_5_REENTRANT)
rc = gethostbyname_r(address, &h, buffer, 8192, &h_errnop);
(void)hp; /* not used for test */
#elif defined(HAVE_GETHOSTBYNAME_R_6) || \
defined(HAVE_GETHOSTBYNAME_R_6_REENTRANT)
rc = gethostbyname_r(address, &h, buffer, 8192, &hp, &h_errnop);
#endif
(void)length;
(void)type;
(void)rc;
hp = gethostbyaddr_r(address, length, type, &h,
buffer, 8192, &h_errnop);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYADDR_R_7_REENTRANT
#define _REENTRANT
#include <sys/types.h>
#include <netdb.h>
int
main ()
{
char * address;
int length;
int type;
struct hostent h;
char buffer[8192];
int h_errnop;
struct hostent * hp;
#ifndef gethostbyaddr_r
(void)gethostbyaddr_r;
#endif
hp = gethostbyaddr_r(address, length, type, &h,
buffer, 8192, &h_errnop);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYADDR_R_8
#include <sys/types.h>
#include <netdb.h>
int
main ()
{
char * address;
int length;
int type;
struct hostent h;
char buffer[8192];
int h_errnop;
struct hostent * hp;
int rc;
#ifndef gethostbyaddr_r
(void)gethostbyaddr_r;
#endif
rc = gethostbyaddr_r(address, length, type, &h,
buffer, 8192, &hp, &h_errnop);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYADDR_R_8_REENTRANT
#define _REENTRANT
#include <sys/types.h>
#include <netdb.h>
int
main ()
{
char * address;
int length;
int type;
struct hostent h;
char buffer[8192];
int h_errnop;
struct hostent * hp;
int rc;
#ifndef gethostbyaddr_r
(void)gethostbyaddr_r;
#endif
rc = gethostbyaddr_r(address, length, type, &h,
buffer, 8192, &hp, &h_errnop);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYNAME_R_3
#include <string.h>
#include <sys/types.h>
#include <netdb.h>
#undef NULL
#define NULL (void *)0
int
main ()
{
struct hostent_data data;
#ifndef gethostbyname_r
(void)gethostbyname_r;
#endif
gethostbyname_r(NULL, NULL, NULL);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYNAME_R_3_REENTRANT
#define _REENTRANT
#include <string.h>
#include <sys/types.h>
#include <netdb.h>
#undef NULL
#define NULL (void *)0
int
main ()
{
struct hostent_data data;
#ifndef gethostbyname_r
(void)gethostbyname_r;
#endif
gethostbyname_r(NULL, NULL, NULL);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYNAME_R_5
#include <sys/types.h>
#include <netinet/in.h>
#include <netdb.h>
#undef NULL
#define NULL (void *)0
int
main ()
{
#ifndef gethostbyname_r
(void)gethostbyname_r;
#endif
gethostbyname_r(NULL, NULL, NULL, 0, NULL);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYNAME_R_5_REENTRANT
#define _REENTRANT
#include <sys/types.h>
#include <netdb.h>
#undef NULL
#define NULL (void *)0
int
main ()
{
#ifndef gethostbyname_r
(void)gethostbyname_r;
#endif
gethostbyname_r(NULL, NULL, NULL, 0, NULL);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYNAME_R_6
#include <sys/types.h>
#include <netdb.h>
#undef NULL
#define NULL (void *)0
int
main ()
{
#ifndef gethostbyname_r
(void)gethostbyname_r;
#endif
gethostbyname_r(NULL, NULL, NULL, 0, NULL, NULL);
;
return 0;
}
#endif
#ifdef HAVE_GETHOSTBYNAME_R_6_REENTRANT
#define _REENTRANT
#include <sys/types.h>
#include <netdb.h>
#undef NULL
#define NULL (void *)0
int
main ()
{
#ifndef gethostbyname_r
(void)gethostbyname_r;
#endif
gethostbyname_r(NULL, NULL, NULL, 0, NULL, NULL);
;
return 0;
}
#endif
#ifdef HAVE_SOCKLEN_T
#ifdef _WIN32
#include <ws2tcpip.h>

CMake/FindCARES.cmake Normal file → Executable file

@@ -11,7 +11,7 @@ FIND_PATH(CARES_INCLUDE_DIR ares.h
/usr/local/include
/usr/include
)
SET(CARES_NAMES ${CARES_NAMES} cares)
FIND_LIBRARY(CARES_LIBRARY
NAMES ${CARES_NAMES}

CMake/FindGSS.cmake

@@ -1,289 +0,0 @@
# - Try to find the GSS Kerberos library
# Once done this will define
#
# GSS_ROOT_DIR - Set this variable to the root installation of GSS
#
# Read-Only variables:
# GSS_FOUND - system has the Heimdal library
# GSS_FLAVOUR - "MIT" or "Heimdal" if anything found.
# GSS_INCLUDE_DIR - the Heimdal include directory
# GSS_LIBRARIES - The libraries needed to use GSS
# GSS_LINK_DIRECTORIES - Directories to add to linker search path
# GSS_LINKER_FLAGS - Additional linker flags
# GSS_COMPILER_FLAGS - Additional compiler flags
# GSS_VERSION - This is set to version advertised by pkg-config or read from manifest.
# In case the library is found but no version info availabe it'll be set to "unknown"
set(_MIT_MODNAME mit-krb5-gssapi)
set(_HEIMDAL_MODNAME heimdal-gssapi)
include(CheckIncludeFile)
include(CheckIncludeFiles)
include(CheckTypeSize)
set(_GSS_ROOT_HINTS
"${GSS_ROOT_DIR}"
"$ENV{GSS_ROOT_DIR}"
)
# try to find library using system pkg-config if user didn't specify root dir
if(NOT GSS_ROOT_DIR AND NOT "$ENV{GSS_ROOT_DIR}")
if(UNIX)
find_package(PkgConfig QUIET)
pkg_search_module(_GSS_PKG ${_MIT_MODNAME} ${_HEIMDAL_MODNAME})
list(APPEND _GSS_ROOT_HINTS "${_GSS_PKG_PREFIX}")
elseif(WIN32)
list(APPEND _GSS_ROOT_HINTS "[HKEY_LOCAL_MACHINE\\SOFTWARE\\MIT\\Kerberos;InstallDir]")
endif()
endif()
if(NOT _GSS_FOUND) #not found by pkg-config. Let's take more traditional approach.
find_file(_GSS_CONFIGURE_SCRIPT
NAMES
"krb5-config"
HINTS
${_GSS_ROOT_HINTS}
PATH_SUFFIXES
bin
NO_CMAKE_PATH
NO_CMAKE_ENVIRONMENT_PATH
)
# if not found in user-supplied directories, maybe system knows better
find_file(_GSS_CONFIGURE_SCRIPT
NAMES
"krb5-config"
PATH_SUFFIXES
bin
)
if(_GSS_CONFIGURE_SCRIPT)
execute_process(
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--cflags" "gssapi"
OUTPUT_VARIABLE _GSS_CFLAGS
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
)
message(STATUS "CFLAGS: ${_GSS_CFLAGS}")
if(NOT _GSS_CONFIGURE_FAILED) # 0 means success
# should also work in an odd case when multiple directories are given
string(STRIP "${_GSS_CFLAGS}" _GSS_CFLAGS)
string(REGEX REPLACE " +-I" ";" _GSS_CFLAGS "${_GSS_CFLAGS}")
string(REGEX REPLACE " +-([^I][^ \\t;]*)" ";-\\1"_GSS_CFLAGS "${_GSS_CFLAGS}")
foreach(_flag ${_GSS_CFLAGS})
if(_flag MATCHES "^-I.*")
string(REGEX REPLACE "^-I" "" _val "${_flag}")
list(APPEND _GSS_INCLUDE_DIR "${_val}")
else()
list(APPEND _GSS_COMPILER_FLAGS "${_flag}")
endif()
endforeach()
endif()
execute_process(
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--libs" "gssapi"
OUTPUT_VARIABLE _GSS_LIB_FLAGS
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
)
message(STATUS "LDFLAGS: ${_GSS_LIB_FLAGS}")
if(NOT _GSS_CONFIGURE_FAILED) # 0 means success
# this script gives us libraries and link directories. Blah. We have to deal with it.
string(STRIP "${_GSS_LIB_FLAGS}" _GSS_LIB_FLAGS)
string(REGEX REPLACE " +-(L|l)" ";-\\1" _GSS_LIB_FLAGS "${_GSS_LIB_FLAGS}")
string(REGEX REPLACE " +-([^Ll][^ \\t;]*)" ";-\\1"_GSS_LIB_FLAGS "${_GSS_LIB_FLAGS}")
foreach(_flag ${_GSS_LIB_FLAGS})
if(_flag MATCHES "^-l.*")
string(REGEX REPLACE "^-l" "" _val "${_flag}")
list(APPEND _GSS_LIBRARIES "${_val}")
elseif(_flag MATCHES "^-L.*")
string(REGEX REPLACE "^-L" "" _val "${_flag}")
list(APPEND _GSS_LINK_DIRECTORIES "${_val}")
else()
list(APPEND _GSS_LINKER_FLAGS "${_flag}")
endif()
endforeach()
endif()
execute_process(
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--version"
OUTPUT_VARIABLE _GSS_VERSION
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
)
# older versions may not have the "--version" parameter. In this case we just don't care.
if(_GSS_CONFIGURE_FAILED)
set(_GSS_VERSION 0)
endif()
execute_process(
COMMAND ${_GSS_CONFIGURE_SCRIPT} "--vendor"
OUTPUT_VARIABLE _GSS_VENDOR
RESULT_VARIABLE _GSS_CONFIGURE_FAILED
)
# older versions may not have the "--vendor" parameter. In this case we just don't care.
if(_GSS_CONFIGURE_FAILED)
set(GSS_FLAVOUR "Heimdal") # most probably, shouldn't really matter
else()
if(_GSS_VENDOR MATCHES ".*H|heimdal.*")
set(GSS_FLAVOUR "Heimdal")
else()
set(GSS_FLAVOUR "MIT")
endif()
endif()
else() # either there is no config script or we are on platform that doesn't provide one (Windows?)
find_path(_GSS_INCLUDE_DIR
NAMES
"gssapi/gssapi.h"
HINTS
${_GSS_ROOT_HINTS}
PATH_SUFFIXES
include
inc
)
if(_GSS_INCLUDE_DIR) #jay, we've found something
set(CMAKE_REQUIRED_INCLUDES "${_GSS_INCLUDE_DIR}")
check_include_files( "gssapi/gssapi_generic.h;gssapi/gssapi_krb5.h" _GSS_HAVE_MIT_HEADERS)
if(_GSS_HAVE_MIT_HEADERS)
set(GSS_FLAVOUR "MIT")
else()
# prevent compiling the header - just check if we can include it
set(CMAKE_REQUIRED_DEFINITIONS "${CMAKE_REQUIRED_DEFINITIONS} -D__ROKEN_H__")
check_include_file( "roken.h" _GSS_HAVE_ROKEN_H)
check_include_file( "heimdal/roken.h" _GSS_HAVE_HEIMDAL_ROKEN_H)
if(_GSS_HAVE_ROKEN_H OR _GSS_HAVE_HEIMDAL_ROKEN_H)
set(GSS_FLAVOUR "Heimdal")
endif()
set(CMAKE_REQUIRED_DEFINITIONS "")
endif()
else()
# I'm not convienced if this is the right way but this is what autotools do at the moment
find_path(_GSS_INCLUDE_DIR
NAMES
"gssapi.h"
HINTS
${_GSS_ROOT_HINTS}
PATH_SUFFIXES
include
inc
)
if(_GSS_INCLUDE_DIR)
set(GSS_FLAVOUR "Heimdal")
endif()
endif()
# if we have headers, check if we can link libraries
if(GSS_FLAVOUR)
set(_GSS_LIBDIR_SUFFIXES "")
set(_GSS_LIBDIR_HINTS ${_GSS_ROOT_HINTS})
get_filename_component(_GSS_CALCULATED_POTENTIAL_ROOT "${_GSS_INCLUDE_DIR}" PATH)
list(APPEND _GSS_LIBDIR_HINTS ${_GSS_CALCULATED_POTENTIAL_ROOT})
if(WIN32)
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
list(APPEND _GSS_LIBDIR_SUFFIXES "lib/AMD64")
if(GSS_FLAVOUR STREQUAL "MIT")
set(_GSS_LIBNAME "gssapi64")
else()
set(_GSS_LIBNAME "libgssapi")
endif()
else()
list(APPEND _GSS_LIBDIR_SUFFIXES "lib/i386")
if(GSS_FLAVOUR STREQUAL "MIT")
set(_GSS_LIBNAME "gssapi32")
else()
set(_GSS_LIBNAME "libgssapi")
endif()
endif()
else()
list(APPEND _GSS_LIBDIR_SUFFIXES "lib;lib64") # those suffixes are not checked for HINTS
if(GSS_FLAVOUR STREQUAL "MIT")
set(_GSS_LIBNAME "gssapi_krb5")
else()
set(_GSS_LIBNAME "gssapi")
endif()
endif()
find_library(_GSS_LIBRARIES
NAMES
${_GSS_LIBNAME}
HINTS
${_GSS_LIBDIR_HINTS}
PATH_SUFFIXES
${_GSS_LIBDIR_SUFFIXES}
)
endif()
endif()
else()
if(_GSS_PKG_${_MIT_MODNAME}_VERSION)
set(GSS_FLAVOUR "MIT")
set(_GSS_VERSION _GSS_PKG_${_MIT_MODNAME}_VERSION)
else()
set(GSS_FLAVOUR "Heimdal")
set(_GSS_VERSION _GSS_PKG_${_MIT_HEIMDAL}_VERSION)
endif()
endif()
set(GSS_INCLUDE_DIR ${_GSS_INCLUDE_DIR})
set(GSS_LIBRARIES ${_GSS_LIBRARIES})
set(GSS_LINK_DIRECTORIES ${_GSS_LINK_DIRECTORIES})
set(GSS_LINKER_FLAGS ${_GSS_LINKER_FLAGS})
set(GSS_COMPILER_FLAGS ${_GSS_COMPILER_FLAGS})
set(GSS_VERSION ${_GSS_VERSION})
if(GSS_FLAVOUR)
if(NOT GSS_VERSION AND GSS_FLAVOUR STREQUAL "Heimdal")
if(CMAKE_SIZEOF_VOID_P EQUAL 8)
set(HEIMDAL_MANIFEST_FILE "Heimdal.Application.amd64.manifest")
else()
set(HEIMDAL_MANIFEST_FILE "Heimdal.Application.x86.manifest")
endif()
if(EXISTS "${GSS_INCLUDE_DIR}/${HEIMDAL_MANIFEST_FILE}")
file(STRINGS "${GSS_INCLUDE_DIR}/${HEIMDAL_MANIFEST_FILE}" heimdal_version_str
REGEX "^.*version=\"[0-9]\\.[^\"]+\".*$")
string(REGEX MATCH "[0-9]\\.[^\"]+"
GSS_VERSION "${heimdal_version_str}")
endif()
if(NOT GSS_VERSION)
set(GSS_VERSION "Heimdal Unknown")
endif()
elseif(NOT GSS_VERSION AND GSS_FLAVOUR STREQUAL "MIT")
get_filename_component(_MIT_VERSION "[HKEY_LOCAL_MACHINE\\SOFTWARE\\MIT\\Kerberos\\SDK\\CurrentVersion;VersionString]" NAME CACHE)
if(WIN32 AND _MIT_VERSION)
set(GSS_VERSION "${_MIT_VERSION}")
else()
set(GSS_VERSION "MIT Unknown")
endif()
endif()
endif()
include(FindPackageHandleStandardArgs)
set(_GSS_REQUIRED_VARS GSS_LIBRARIES GSS_FLAVOUR)
find_package_handle_standard_args(GSS
REQUIRED_VARS
${_GSS_REQUIRED_VARS}
VERSION_VAR
GSS_VERSION
FAIL_MESSAGE
"Could NOT find GSS, try to set the path to GSS root folder in the system variable GSS_ROOT_DIR"
)
mark_as_advanced(GSS_INCLUDE_DIR GSS_LIBRARIES)
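A minimal consumer sketch for this module (the module path and the status message are illustrative; the GSS_* variables are the ones documented at the top of the file):

list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/CMake")   # so find_package() can locate FindGSS.cmake
find_package(GSS)
if(GSS_FOUND)
  message(STATUS "Using ${GSS_FLAVOUR} GSS-API ${GSS_VERSION}")
  include_directories(${GSS_INCLUDE_DIR})
  link_directories(${GSS_LINK_DIRECTORIES})
  add_definitions(${GSS_COMPILER_FLAGS})
  # GSS_LIBRARIES would then be handed to target_link_libraries() of the consuming target
endif()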

CMake/FindLibSSH2.cmake

@@ -1,35 +0,0 @@
# - Try to find the libssh2 library
# Once done this will define
#
# LIBSSH2_FOUND - system has the libssh2 library
# LIBSSH2_INCLUDE_DIR - the libssh2 include directory
# LIBSSH2_LIBRARY - the libssh2 library name
if (LIBSSH2_INCLUDE_DIR AND LIBSSH2_LIBRARY)
set(LibSSH2_FIND_QUIETLY TRUE)
endif (LIBSSH2_INCLUDE_DIR AND LIBSSH2_LIBRARY)
FIND_PATH(LIBSSH2_INCLUDE_DIR libssh2.h
)
FIND_LIBRARY(LIBSSH2_LIBRARY NAMES ssh2
)
if(LIBSSH2_INCLUDE_DIR)
file(STRINGS "${LIBSSH2_INCLUDE_DIR}/libssh2.h" libssh2_version_str REGEX "^#define[\t ]+LIBSSH2_VERSION_NUM[\t ]+0x[0-9][0-9][0-9][0-9][0-9][0-9].*")
string(REGEX REPLACE "^.*LIBSSH2_VERSION_NUM[\t ]+0x([0-9][0-9]).*$" "\\1" LIBSSH2_VERSION_MAJOR "${libssh2_version_str}")
string(REGEX REPLACE "^.*LIBSSH2_VERSION_NUM[\t ]+0x[0-9][0-9]([0-9][0-9]).*$" "\\1" LIBSSH2_VERSION_MINOR "${libssh2_version_str}")
string(REGEX REPLACE "^.*LIBSSH2_VERSION_NUM[\t ]+0x[0-9][0-9][0-9][0-9]([0-9][0-9]).*$" "\\1" LIBSSH2_VERSION_PATCH "${libssh2_version_str}")
string(REGEX REPLACE "^0(.+)" "\\1" LIBSSH2_VERSION_MAJOR "${LIBSSH2_VERSION_MAJOR}")
string(REGEX REPLACE "^0(.+)" "\\1" LIBSSH2_VERSION_MINOR "${LIBSSH2_VERSION_MINOR}")
string(REGEX REPLACE "^0(.+)" "\\1" LIBSSH2_VERSION_PATCH "${LIBSSH2_VERSION_PATCH}")
set(LIBSSH2_VERSION "${LIBSSH2_VERSION_MAJOR}.${LIBSSH2_VERSION_MINOR}.${LIBSSH2_VERSION_PATCH}")
endif(LIBSSH2_INCLUDE_DIR)
include(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(LibSSH2 DEFAULT_MSG LIBSSH2_INCLUDE_DIR LIBSSH2_LIBRARY )
MARK_AS_ADVANCED(LIBSSH2_INCLUDE_DIR LIBSSH2_LIBRARY LIBSSH2_VERSION_MAJOR LIBSSH2_VERSION_MINOR LIBSSH2_VERSION_PATCH LIBSSH2_VERSION)
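Usage follows the usual find-module pattern; a hedged sketch (CURL_LIBS is the list that the convenience macros elsewhere in this compare prepend libraries to):

find_package(LibSSH2)            # requires this module on CMAKE_MODULE_PATH
if(LIBSSH2_FOUND)
  message(STATUS "libssh2 ${LIBSSH2_VERSION} in ${LIBSSH2_INCLUDE_DIR}")
  include_directories(${LIBSSH2_INCLUDE_DIR})
  list(APPEND CURL_LIBS ${LIBSSH2_LIBRARY})
endif()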

CMake/FindOpenSSL.cmake Normal file

@@ -0,0 +1,19 @@
# Extension of the standard FindOpenSSL.cmake
# Adds OPENSSL_INCLUDE_DIRS and libeay32
include("${CMAKE_ROOT}/Modules/FindOpenSSL.cmake")
# Bill Hoffman told that libeay32 is necessary for him:
find_library(SSL_LIBEAY NAMES libeay32)
if(OPENSSL_FOUND)
if(SSL_LIBEAY)
list(APPEND OPENSSL_LIBRARIES ${SSL_LIBEAY})
else()
set(OPENSSL_FOUND FALSE)
endif()
endif()
if(OPENSSL_FOUND)
set(OPENSSL_INCLUDE_DIRS ${OPENSSL_INCLUDE_DIR})
endif()

CMake/FindZLIB.cmake Normal file

@@ -0,0 +1,8 @@
# Locate zlib
include("${CMAKE_ROOT}/Modules/FindZLIB.cmake")
find_library(ZLIB_LIBRARY_DEBUG NAMES zd zlibd zdlld zlib1d )
if(ZLIB_FOUND AND ZLIB_LIBRARY_DEBUG)
set( ZLIB_LIBRARIES optimized "${ZLIB_LIBRARY}" debug ${ZLIB_LIBRARY_DEBUG})
endif()

CMake/Macros.cmake

@@ -1,95 +0,0 @@
#File defines convenience macros for available feature testing
# This macro checks if the symbol exists in the library and if it
# does, it prepends library to the list. It is intended to be called
# multiple times with a sequence of possibly dependent libraries in
# order of least-to-most-dependent. Some libraries depend on others
# to link correctly.
macro(CHECK_LIBRARY_EXISTS_CONCAT LIBRARY SYMBOL VARIABLE)
check_library_exists("${LIBRARY};${CURL_LIBS}" ${SYMBOL} "${CMAKE_LIBRARY_PATH}"
${VARIABLE})
if(${VARIABLE})
set(CURL_LIBS ${LIBRARY} ${CURL_LIBS})
endif(${VARIABLE})
endmacro(CHECK_LIBRARY_EXISTS_CONCAT)
# Check if header file exists and add it to the list.
# This macro is intended to be called multiple times with a sequence of
# possibly dependent header files. Some headers depend on others to be
# compiled correctly.
macro(CHECK_INCLUDE_FILE_CONCAT FILE VARIABLE)
check_include_files("${CURL_INCLUDES};${FILE}" ${VARIABLE})
if(${VARIABLE})
set(CURL_INCLUDES ${CURL_INCLUDES} ${FILE})
set(CURL_TEST_DEFINES "${CURL_TEST_DEFINES} -D${VARIABLE}")
endif(${VARIABLE})
endmacro(CHECK_INCLUDE_FILE_CONCAT)
# For other curl specific tests, use this macro.
macro(CURL_INTERNAL_TEST CURL_TEST)
if(NOT DEFINED "${CURL_TEST}")
set(MACRO_CHECK_FUNCTION_DEFINITIONS
"-D${CURL_TEST} ${CURL_TEST_DEFINES} ${CMAKE_REQUIRED_FLAGS}")
if(CMAKE_REQUIRED_LIBRARIES)
set(CURL_TEST_ADD_LIBRARIES
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
endif(CMAKE_REQUIRED_LIBRARIES)
message(STATUS "Performing Curl Test ${CURL_TEST}")
try_compile(${CURL_TEST}
${CMAKE_BINARY_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/CMake/CurlTests.c
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_FUNCTION_DEFINITIONS}
"${CURL_TEST_ADD_LIBRARIES}"
OUTPUT_VARIABLE OUTPUT)
if(${CURL_TEST})
set(${CURL_TEST} 1 CACHE INTERNAL "Curl test ${FUNCTION}")
message(STATUS "Performing Curl Test ${CURL_TEST} - Success")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
"Performing Curl Test ${CURL_TEST} passed with the following output:\n"
"${OUTPUT}\n")
else(${CURL_TEST})
message(STATUS "Performing Curl Test ${CURL_TEST} - Failed")
set(${CURL_TEST} "" CACHE INTERNAL "Curl test ${FUNCTION}")
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Performing Curl Test ${CURL_TEST} failed with the following output:\n"
"${OUTPUT}\n")
endif(${CURL_TEST})
endif()
endmacro(CURL_INTERNAL_TEST)
macro(CURL_INTERNAL_TEST_RUN CURL_TEST)
if(NOT DEFINED "${CURL_TEST}_COMPILE")
set(MACRO_CHECK_FUNCTION_DEFINITIONS
"-D${CURL_TEST} ${CMAKE_REQUIRED_FLAGS}")
if(CMAKE_REQUIRED_LIBRARIES)
set(CURL_TEST_ADD_LIBRARIES
"-DLINK_LIBRARIES:STRING=${CMAKE_REQUIRED_LIBRARIES}")
endif(CMAKE_REQUIRED_LIBRARIES)
message(STATUS "Performing Curl Test ${CURL_TEST}")
try_run(${CURL_TEST} ${CURL_TEST}_COMPILE
${CMAKE_BINARY_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/CMake/CurlTests.c
CMAKE_FLAGS -DCOMPILE_DEFINITIONS:STRING=${MACRO_CHECK_FUNCTION_DEFINITIONS}
"${CURL_TEST_ADD_LIBRARIES}"
OUTPUT_VARIABLE OUTPUT)
if(${CURL_TEST}_COMPILE AND NOT ${CURL_TEST})
set(${CURL_TEST} 1 CACHE INTERNAL "Curl test ${FUNCTION}")
message(STATUS "Performing Curl Test ${CURL_TEST} - Success")
else(${CURL_TEST}_COMPILE AND NOT ${CURL_TEST})
message(STATUS "Performing Curl Test ${CURL_TEST} - Failed")
set(${CURL_TEST} "" CACHE INTERNAL "Curl test ${FUNCTION}")
file(APPEND "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log"
"Performing Curl Test ${CURL_TEST} failed with the following output:\n"
"${OUTPUT}")
if(${CURL_TEST}_COMPILE)
file(APPEND
"${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log"
"There was a problem running this test\n")
endif(${CURL_TEST}_COMPILE)
file(APPEND "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log"
"\n\n")
endif(${CURL_TEST}_COMPILE AND NOT ${CURL_TEST})
endif()
endmacro(CURL_INTERNAL_TEST_RUN)
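To illustrate the least-to-most-dependent ordering these macros expect (the specific libraries, headers and result variables here are examples, not an excerpt from the real CMakeLists.txt):

include(CheckLibraryExists)
include(CheckIncludeFiles)
check_library_exists_concat("socket" connect     HAVE_LIBSOCKET)   # prepended to CURL_LIBS on success
check_library_exists_concat("ssl"    SSL_connect HAVE_LIBSSL)      # may need libsocket already in CURL_LIBS
check_include_file_concat("sys/types.h"  HAVE_SYS_TYPES_H)
check_include_file_concat("sys/socket.h" HAVE_SYS_SOCKET_H)        # checked together with the headers above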

CMake/OtherTests.cmake

@@ -1,35 +1,39 @@
include(CheckCSourceCompiles)
# The begin of the sources (macros and includes)
set(_source_epilogue "#undef inline")
include(CurlCheckCSourceCompiles)
set(EXTRA_DEFINES "__unused1\n#undef inline\n#define __unused2")
set(HEADER_INCLUDES)
set(headers_hack)
macro(add_header_include check header)
if(${check})
set(_source_epilogue "${_source_epilogue}\n#include <${header}>")
set(headers_hack
"${headers_hack}\n#include <${header}>")
#SET(HEADER_INCLUDES
# ${HEADER_INCLUDES}
# "${header}")
endif(${check})
endmacro(add_header_include)
set(signature_call_conv)
if(HAVE_WINDOWS_H)
add_header_include(HAVE_WINSOCK2_H "winsock2.h")
add_header_include(HAVE_WINDOWS_H "windows.h")
add_header_include(HAVE_WINSOCK2_H "winsock2.h")
add_header_include(HAVE_WINSOCK_H "winsock.h")
set(_source_epilogue
"${_source_epilogue}\n#ifndef WIN32_LEAN_AND_MEAN\n#define WIN32_LEAN_AND_MEAN\n#endif")
set(EXTRA_DEFINES ${EXTRA_DEFINES}
"__unused7\n#ifndef WIN32_LEAN_AND_MEAN\n#define WIN32_LEAN_AND_MEAN\n#endif\n#define __unused3")
set(signature_call_conv "PASCAL")
if(HAVE_LIBWS2_32)
set(CMAKE_REQUIRED_LIBRARIES ws2_32)
endif()
else(HAVE_WINDOWS_H)
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
add_header_include(HAVE_SYS_SOCKET_H "sys/socket.h")
endif(HAVE_WINDOWS_H)
check_c_source_compiles("${_source_epilogue}
int main(void) {
recv(0, 0, 0, 0);
return 0;
}" curl_cv_recv)
set(EXTRA_DEFINES_BACKUP "${EXTRA_DEFINES}")
set(EXTRA_DEFINES "${EXTRA_DEFINES_BACKUP}\n${headers_hack}\n${extern_line}\n#define __unused5")
curl_check_c_source_compiles("recv(0, 0, 0, 0)" curl_cv_recv)
if(curl_cv_recv)
# AC_CACHE_CHECK([types of arguments and return type for recv],
#[curl_cv_func_recv_args], [
#SET(curl_cv_func_recv_args "unknown")
#for recv_retv in 'int' 'ssize_t'; do
if(NOT DEFINED curl_cv_func_recv_args OR "${curl_cv_func_recv_args}" STREQUAL "unknown")
foreach(recv_retv "int" "ssize_t" )
foreach(recv_arg1 "int" "ssize_t" "SOCKET")
@@ -37,23 +41,17 @@ if(curl_cv_recv)
foreach(recv_arg3 "size_t" "int" "socklen_t" "unsigned int")
foreach(recv_arg4 "int" "unsigned int")
if(NOT curl_cv_func_recv_done)
unset(curl_cv_func_recv_test CACHE)
check_c_source_compiles("
${_source_epilogue}
extern ${recv_retv} ${signature_call_conv}
recv(${recv_arg1}, ${recv_arg2}, ${recv_arg3}, ${recv_arg4});
int main(void) {
set(curl_cv_func_recv_test "UNKNOWN")
set(extern_line "extern ${recv_retv} ${signature_call_conv} recv(${recv_arg1}, ${recv_arg2}, ${recv_arg3}, ${recv_arg4})\;")
set(EXTRA_DEFINES "${EXTRA_DEFINES_BACKUP}\n${headers_hack}\n${extern_line}\n#define __unused5")
curl_check_c_source_compiles("
${recv_arg1} s=0;
${recv_arg2} buf=0;
${recv_arg3} len=0;
${recv_arg4} flags=0;
${recv_retv} res = recv(s, buf, len, flags);
(void) res;
return 0;
}"
curl_cv_func_recv_test)
message(STATUS
"Tested: ${recv_retv} recv(${recv_arg1}, ${recv_arg2}, ${recv_arg3}, ${recv_arg4})")
${recv_retv} res = recv(s, buf, len, flags)"
curl_cv_func_recv_test
"${recv_retv} recv(${recv_arg1}, ${recv_arg2}, ${recv_arg3}, ${recv_arg4})")
if(curl_cv_func_recv_test)
set(curl_cv_func_recv_args
"${recv_arg1},${recv_arg2},${recv_arg3},${recv_arg4},${recv_retv}")
@@ -71,13 +69,18 @@ if(curl_cv_recv)
endforeach(recv_arg2)
endforeach(recv_arg1)
endforeach(recv_retv)
else()
else(NOT DEFINED curl_cv_func_recv_args OR "${curl_cv_func_recv_args}" STREQUAL "unknown")
string(REGEX REPLACE "^([^,]*),[^,]*,[^,]*,[^,]*,[^,]*$" "\\1" RECV_TYPE_ARG1 "${curl_cv_func_recv_args}")
string(REGEX REPLACE "^[^,]*,([^,]*),[^,]*,[^,]*,[^,]*$" "\\1" RECV_TYPE_ARG2 "${curl_cv_func_recv_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,([^,]*),[^,]*,[^,]*$" "\\1" RECV_TYPE_ARG3 "${curl_cv_func_recv_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,[^,]*,([^,]*),[^,]*$" "\\1" RECV_TYPE_ARG4 "${curl_cv_func_recv_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,[^,]*,[^,]*,([^,]*)$" "\\1" RECV_TYPE_RETV "${curl_cv_func_recv_args}")
endif()
#MESSAGE("RECV_TYPE_ARG1 ${RECV_TYPE_ARG1}")
#MESSAGE("RECV_TYPE_ARG2 ${RECV_TYPE_ARG2}")
#MESSAGE("RECV_TYPE_ARG3 ${RECV_TYPE_ARG3}")
#MESSAGE("RECV_TYPE_ARG4 ${RECV_TYPE_ARG4}")
#MESSAGE("RECV_TYPE_RETV ${RECV_TYPE_RETV}")
endif(NOT DEFINED curl_cv_func_recv_args OR "${curl_cv_func_recv_args}" STREQUAL "unknown")
if("${curl_cv_func_recv_args}" STREQUAL "unknown")
message(FATAL_ERROR "Cannot find proper types to use for recv args")
@@ -88,12 +91,12 @@ endif(curl_cv_recv)
set(curl_cv_func_recv_args "${curl_cv_func_recv_args}" CACHE INTERNAL "Arguments for recv")
set(HAVE_RECV 1)
check_c_source_compiles("${_source_epilogue}
int main(void) {
send(0, 0, 0, 0);
return 0;
}" curl_cv_send)
curl_check_c_source_compiles("send(0, 0, 0, 0)" curl_cv_send)
if(curl_cv_send)
# AC_CACHE_CHECK([types of arguments and return type for send],
#[curl_cv_func_send_args], [
#SET(curl_cv_func_send_args "unknown")
#for send_retv in 'int' 'ssize_t'; do
if(NOT DEFINED curl_cv_func_send_args OR "${curl_cv_func_send_args}" STREQUAL "unknown")
foreach(send_retv "int" "ssize_t" )
foreach(send_arg1 "int" "ssize_t" "SOCKET")
@@ -101,24 +104,19 @@ if(curl_cv_send)
foreach(send_arg3 "size_t" "int" "socklen_t" "unsigned int")
foreach(send_arg4 "int" "unsigned int")
if(NOT curl_cv_func_send_done)
unset(curl_cv_func_send_test CACHE)
check_c_source_compiles("
${_source_epilogue}
extern ${send_retv} ${signature_call_conv}
send(${send_arg1}, ${send_arg2}, ${send_arg3}, ${send_arg4});
int main(void) {
set(curl_cv_func_send_test "UNKNOWN")
set(extern_line "extern ${send_retv} ${signature_call_conv} send(${send_arg1}, ${send_arg2}, ${send_arg3}, ${send_arg4})\;")
set(EXTRA_DEFINES "${EXTRA_DEFINES_BACKUP}\n${headers_hack}\n${extern_line}\n#define __unused5")
curl_check_c_source_compiles("
${send_arg1} s=0;
${send_arg2} buf=0;
${send_arg3} len=0;
${send_arg4} flags=0;
${send_retv} res = send(s, buf, len, flags);
(void) res;
return 0;
}"
curl_cv_func_send_test)
message(STATUS
"Tested: ${send_retv} send(${send_arg1}, ${send_arg2}, ${send_arg3}, ${send_arg4})")
${send_retv} res = send(s, buf, len, flags)"
curl_cv_func_send_test
"${send_retv} send(${send_arg1}, ${send_arg2}, ${send_arg3}, ${send_arg4})")
if(curl_cv_func_send_test)
#MESSAGE("Found arguments: ${curl_cv_func_send_test}")
string(REGEX REPLACE "(const) .*" "\\1" send_qual_arg2 "${send_arg2}")
string(REGEX REPLACE "const (.*)" "\\1" send_arg2 "${send_arg2}")
set(curl_cv_func_send_args
@@ -137,14 +135,20 @@ if(curl_cv_send)
endforeach(send_arg2)
endforeach(send_arg1)
endforeach(send_retv)
else()
else(NOT DEFINED curl_cv_func_send_args OR "${curl_cv_func_send_args}" STREQUAL "unknown")
string(REGEX REPLACE "^([^,]*),[^,]*,[^,]*,[^,]*,[^,]*,[^,]*$" "\\1" SEND_TYPE_ARG1 "${curl_cv_func_send_args}")
string(REGEX REPLACE "^[^,]*,([^,]*),[^,]*,[^,]*,[^,]*,[^,]*$" "\\1" SEND_TYPE_ARG2 "${curl_cv_func_send_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,([^,]*),[^,]*,[^,]*,[^,]*$" "\\1" SEND_TYPE_ARG3 "${curl_cv_func_send_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,[^,]*,([^,]*),[^,]*,[^,]*$" "\\1" SEND_TYPE_ARG4 "${curl_cv_func_send_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,[^,]*,[^,]*,([^,]*),[^,]*$" "\\1" SEND_TYPE_RETV "${curl_cv_func_send_args}")
string(REGEX REPLACE "^[^,]*,[^,]*,[^,]*,[^,]*,[^,]*,([^,]*)$" "\\1" SEND_QUAL_ARG2 "${curl_cv_func_send_args}")
endif()
#MESSAGE("SEND_TYPE_ARG1 ${SEND_TYPE_ARG1}")
#MESSAGE("SEND_TYPE_ARG2 ${SEND_TYPE_ARG2}")
#MESSAGE("SEND_TYPE_ARG3 ${SEND_TYPE_ARG3}")
#MESSAGE("SEND_TYPE_ARG4 ${SEND_TYPE_ARG4}")
#MESSAGE("SEND_TYPE_RETV ${SEND_TYPE_RETV}")
#MESSAGE("SEND_QUAL_ARG2 ${SEND_QUAL_ARG2}")
endif(NOT DEFINED curl_cv_func_send_args OR "${curl_cv_func_send_args}" STREQUAL "unknown")
if("${curl_cv_func_send_args}" STREQUAL "unknown")
message(FATAL_ERROR "Cannot find proper types to use for send args")
@@ -156,71 +160,88 @@ endif(curl_cv_send)
set(curl_cv_func_send_args "${curl_cv_func_send_args}" CACHE INTERNAL "Arguments for send")
set(HAVE_SEND 1)
check_c_source_compiles("${_source_epilogue}
int main(void) {
int flag = MSG_NOSIGNAL;
(void)flag;
return 0;
}" HAVE_MSG_NOSIGNAL)
set(EXTRA_DEFINES "${EXTRA_DEFINES}\n${headers_hack}\n#define __unused5")
curl_check_c_source_compiles("int flag = MSG_NOSIGNAL" HAVE_MSG_NOSIGNAL)
if(NOT HAVE_WINDOWS_H)
set(EXTRA_DEFINES "__unused1\n#undef inline\n#define __unused2")
set(HEADER_INCLUDES)
set(headers_hack)
macro(add_header_include check header)
if(${check})
set(headers_hack
"${headers_hack}\n#include <${header}>")
#SET(HEADER_INCLUDES
# ${HEADER_INCLUDES}
# "${header}")
endif(${check})
endmacro(add_header_include header)
if(HAVE_WINDOWS_H)
set(EXTRA_DEFINES ${EXTRA_DEFINES}
"__unused7\n#ifndef WIN32_LEAN_AND_MEAN\n#define WIN32_LEAN_AND_MEAN\n#endif\n#define __unused3")
add_header_include(HAVE_WINDOWS_H "windows.h")
add_header_include(HAVE_WINSOCK2_H "winsock2.h")
add_header_include(HAVE_WINSOCK_H "winsock.h")
else(HAVE_WINDOWS_H)
add_header_include(HAVE_SYS_TYPES_H "sys/types.h")
add_header_include(HAVE_SYS_TIME_H "sys/time.h")
add_header_include(TIME_WITH_SYS_TIME "time.h")
add_header_include(HAVE_TIME_H "time.h")
endif()
check_c_source_compiles("${_source_epilogue}
int main(void) {
struct timeval ts;
ts.tv_sec = 0;
ts.tv_usec = 0;
(void)ts;
return 0;
}" HAVE_STRUCT_TIMEVAL)
endif(HAVE_WINDOWS_H)
set(EXTRA_DEFINES "${EXTRA_DEFINES}\n${headers_hack}\n#define __unused5")
curl_check_c_source_compiles("struct timeval ts;\nts.tv_sec = 0;\nts.tv_usec = 0" HAVE_STRUCT_TIMEVAL)
include(CheckCSourceRuns)
set(CMAKE_REQUIRED_FLAGS)
include(CurlCheckCSourceRuns)
set(EXTRA_DEFINES)
set(HEADER_INCLUDES)
if(HAVE_SYS_POLL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_SYS_POLL_H")
set(HEADER_INCLUDES "sys/poll.h")
endif(HAVE_SYS_POLL_H)
check_c_source_runs("
#ifdef HAVE_SYS_POLL_H
# include <sys/poll.h>
#endif
int main(void) {
return poll((void *)0, 0, 10 /*ms*/);
}" HAVE_POLL_FINE)
curl_check_c_source_runs("return poll((void *)0, 0, 10 /*ms*/)" HAVE_POLL_FINE)
set(HAVE_SIG_ATOMIC_T 1)
set(CMAKE_REQUIRED_FLAGS)
set(EXTRA_DEFINES)
set(HEADER_INCLUDES)
if(HAVE_SIGNAL_H)
set(CMAKE_REQUIRED_FLAGS "-DHAVE_SIGNAL_H")
set(HEADER_INCLUDES "signal.h")
set(CMAKE_EXTRA_INCLUDE_FILES "signal.h")
endif(HAVE_SIGNAL_H)
check_type_size("sig_atomic_t" SIZEOF_SIG_ATOMIC_T)
if(HAVE_SIZEOF_SIG_ATOMIC_T)
check_c_source_compiles("
#ifdef HAVE_SIGNAL_H
# include <signal.h>
#endif
int main(void) {
static volatile sig_atomic_t dummy = 0;
(void)dummy;
return 0;
}" HAVE_SIG_ATOMIC_T_NOT_VOLATILE)
curl_check_c_source_compiles("static volatile sig_atomic_t dummy = 0" HAVE_SIG_ATOMIC_T_NOT_VOLATILE)
if(NOT HAVE_SIG_ATOMIC_T_NOT_VOLATILE)
set(HAVE_SIG_ATOMIC_T_VOLATILE 1)
endif(NOT HAVE_SIG_ATOMIC_T_NOT_VOLATILE)
endif(HAVE_SIZEOF_SIG_ATOMIC_T)
set(CHECK_TYPE_SIZE_PREINCLUDE
"#undef inline")
if(HAVE_WINDOWS_H)
set(CMAKE_EXTRA_INCLUDE_FILES winsock2.h)
else()
set(CMAKE_EXTRA_INCLUDE_FILES)
set(CHECK_TYPE_SIZE_PREINCLUDE "${CHECK_TYPE_SIZE_PREINCLUDE}
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>")
if(HAVE_WINSOCK2_H)
set(CHECK_TYPE_SIZE_PREINCLUDE "${CHECK_TYPE_SIZE_PREINCLUDE}\n#include <winsock2.h>")
endif(HAVE_WINSOCK2_H)
else(HAVE_WINDOWS_H)
if(HAVE_SYS_SOCKET_H)
set(CMAKE_EXTRA_INCLUDE_FILES sys/socket.h)
set(CMAKE_EXTRA_INCLUDE_FILES ${CMAKE_EXTRA_INCLUDE_FILES}
"sys/socket.h")
endif(HAVE_SYS_SOCKET_H)
endif()
if(HAVE_NETINET_IN_H)
set(CMAKE_EXTRA_INCLUDE_FILES ${CMAKE_EXTRA_INCLUDE_FILES}
"netinet/in.h")
endif(HAVE_NETINET_IN_H)
if(HAVE_ARPA_INET_H)
set(CMAKE_EXTRA_INCLUDE_FILES ${CMAKE_EXTRA_INCLUDE_FILES}
"arpa/inet.h")
endif(HAVE_ARPA_INET_H)
endif(HAVE_WINDOWS_H)
check_type_size("struct sockaddr_storage" SIZEOF_STRUCT_SOCKADDR_STORAGE)
if(HAVE_SIZEOF_STRUCT_SOCKADDR_STORAGE)

CMake/Platforms/WindowsCache.cmake

@@ -5,7 +5,6 @@ if(NOT UNIX)
set(HAVE_LIBSOCKET 0)
set(NOT_NEED_LIBNSL 0)
set(HAVE_LIBNSL 0)
set(HAVE_GETHOSTNAME 1)
set(HAVE_LIBZ 0)
set(HAVE_LIBCRYPTO 0)
@@ -15,6 +14,7 @@ if(NOT UNIX)
set(HAVE_ARPA_INET_H 0)
set(HAVE_DLFCN_H 0)
set(HAVE_FCNTL_H 1)
set(HAVE_FEATURES_H 0)
set(HAVE_INTTYPES_H 0)
set(HAVE_IO_H 1)
set(HAVE_MALLOC_H 1)
@@ -108,11 +108,7 @@ if(NOT UNIX)
set(HAVE_IN_ADDR_T 0)
set(HAVE_INET_NTOA_R_DECL 0)
set(HAVE_INET_NTOA_R_DECL_REENTRANT 0)
if(ENABLE_IPV6)
set(HAVE_GETADDRINFO 1)
else()
set(HAVE_GETADDRINFO 0)
endif()
set(HAVE_GETADDRINFO 0)
set(STDC_HEADERS 1)
set(RETSIGTYPE_TEST 1)

File diff suppressed because it is too large.

COPYING

@@ -1,7 +1,6 @@
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 1996 - 2016, Daniel Stenberg, <daniel@haxx.se>, and many
contributors, see the THANKS file.
Copyright (c) 1996 - 2011, Daniel Stenberg, <daniel@haxx.se>.
All rights reserved.

docs/INSTALL

@@ -48,9 +48,9 @@ installed:
o nroff + perl
If you don't have nroff and perl and you for some reason don't want to
install them, you can rename the source file src/tool_hugehelp.c.cvs to
src/tool_hugehelp.c and avoid having to generate this file. This will
give you a stubbed version of the file that doesn't contain actual content.
install them, you can rename the source file src/hugehelp.c.cvs to
src/hugehelp.c and avoid having to generate this file. This will give you
a stubbed version of the file that doesn't contain actual content.
MAC OS X

MacOSX-Framework

@@ -10,10 +10,6 @@
# 10.5 is the *ONLY* SDK that support PPC64 :( -- 10.6 do not have ppc64 support
#If you need to have PPC64 support then change below to 1
PPC64_NEEDED=0
# Apple does not support building for PPC anymore in Xcode 4 and later.
# If you're using Xcode 3 or earlier and need PPC support, then change
# the setting below to 1
PPC_NEEDED=0
# For me the default is to develop for the platform I am on, and if you
#desire compatibility with older versions then change USE_OLD to 1 :)
@@ -28,16 +24,9 @@ FRAMEWORK_VERSION=Versions/Release-$VERSION
# and setup the right paths to this version, leaving the system version
# "intact", so you can "fix" it later with the links to Versions/A/...
DEVELOPER_PATH=`xcode-select --print-path`
# Around Xcode 4.3, SDKs were moved from the Developer folder into the
# MacOSX.platform folder
if test -d "$DEVELOPER_PATH/Platforms/MacOSX.platform/Developer/SDKs"; then
SDK_PATH="$DEVELOPER_PATH/Platforms/MacOSX.platform/Developer/SDKs"
else
SDK_PATH="$DEVELOPER_PATH/SDKs";
fi
OLD_SDK=`ls $SDK_PATH|head -1`
NEW_SDK=`ls -r $SDK_PATH|head -1`
OLD_SDK=`ls /Developer/SDKs|head -1`
NEW_SDK=`ls -r /Developer/SDKs|head -1`
if test "0"$USE_OLD -gt 0
then
@@ -48,24 +37,21 @@ fi
MACVER=`echo $SDK32|sed -e s/[a-zA-Z]//g -e s/.\$//`
SDK32_DIR=$SDK_PATH/$SDK32
SDK32_DIR='/Developer/SDKs/'$SDK32
MINVER32='-mmacosx-version-min='$MACVER
if test $PPC_NEEDED -gt 0; then
ARCHES32='-arch i386 -arch ppc'
else
ARCHES32='-arch i386'
fi
ARCHES32='-arch i386 -arch ppc'
if test $PPC64_NEEDED -gt 0
then
SDK64=10.5
ARCHES64='-arch x86_64 -arch ppc64'
SDK64=`ls $SDK_PATH|grep 10.5|head -1`
SDK64=`ls /Developer/SDKs|grep 10.5|head -1`
else
ARCHES64='-arch x86_64'
#We "know" that 10.4 and earlier do not support 64bit
OLD_SDK64=`ls $SDK_PATH|egrep -v "10.[0-4]"|head -1`
NEW_SDK64=`ls -r $SDK_PATH|egrep -v "10.[0-4][^0-9]" | head -1`
OLD_SDK64=`ls /Developer/SDKs|egrep -v "10.[0-4]"|head -1`
NEW_SDK64=`ls -r /Developer/SDKs|egrep -v "10.[0-4]"|head -1`
if test $USE_OLD -gt 0
then
SDK64=$OLD_SDK64
@@ -74,7 +60,7 @@ else
fi
fi
SDK64_DIR=$SDK_PATH/$SDK64
SDK64_DIR='/Developer/SDKs/'$SDK64
MACVER64=`echo $SDK64|sed -e s/[a-zA-Z]//g -e s/.\$//`
MINVER64='-mmacosx-version-min='$MACVER64
@@ -82,19 +68,19 @@ MINVER64='-mmacosx-version-min='$MACVER64
if test ! -z $SDK32; then
echo "----Configuring libcurl for 32 bit universal framework..."
make clean
./configure --disable-dependency-tracking --disable-static --with-gssapi --with-darwinssl \
CFLAGS="-Os -isysroot $SDK32_DIR $ARCHES32" \
LDFLAGS="-Wl,-syslibroot,$SDK32_DIR $ARCHES32 -Wl,-headerpad_max_install_names" \
./configure --disable-dependency-tracking --disable-static --with-gssapi \
CFLAGS="-Os -isysroot $SDK32_DIR $ARCHES32 $MINVER32" \
LDFLAGS="-Wl,-syslibroot,$SDK32_DIR $ARCHES32 $MINVER32 -Wl,-headerpad_max_install_names" \
CC=$CC
echo "----Building 32 bit libcurl..."
make -j `sysctl -n hw.logicalcpu_max`
make
echo "----Creating 32 bit framework..."
rm -r libcurl.framework
mkdir -p libcurl.framework/${FRAMEWORK_VERSION}/Resources
cp lib/.libs/libcurl.dylib libcurl.framework/${FRAMEWORK_VERSION}/libcurl
install_name_tool -id @rpath/libcurl.framework/${FRAMEWORK_VERSION}/libcurl libcurl.framework/${FRAMEWORK_VERSION}/libcurl
install_name_tool -id @executable_path/../Frameworks/libcurl.framework/${FRAMEWORK_VERSION}/libcurl libcurl.framework/${FRAMEWORK_VERSION}/libcurl
/usr/bin/sed -e "s/7\.12\.3/$VERSION/" lib/libcurl.plist >libcurl.framework/${FRAMEWORK_VERSION}/Resources/Info.plist
mkdir -p libcurl.framework/${FRAMEWORK_VERSION}/Headers/curl
cp include/curl/*.h libcurl.framework/${FRAMEWORK_VERSION}/Headers/curl
@@ -103,25 +89,25 @@ if test ! -z $SDK32; then
ln -fs ${FRAMEWORK_VERSION}/Resources Resources
ln -fs ${FRAMEWORK_VERSION}/Headers Headers
cd Versions
ln -fs $(basename "${FRAMEWORK_VERSION}") Current
ln -fs ${FRAMEWORK_VERSION} Current
echo Testing for SDK64
echo TEsting for SDK64
if test -d $SDK64_DIR; then
echo entering...
popd
make clean
echo "----Configuring libcurl for 64 bit universal framework..."
./configure --disable-dependency-tracking --disable-static --with-gssapi --with-darwinssl \
CFLAGS="-Os -isysroot $SDK64_DIR $ARCHES64" \
LDFLAGS="-Wl,-syslibroot,$SDK64_DIR $ARCHES64 -Wl,-headerpad_max_install_names" \
./configure --disable-dependency-tracking --disable-static --with-gssapi \
CFLAGS="-Os -isysroot $SDK64_DIR $ARCHES64 $MINVER64" \
LDFLAGS="-Wl,-syslibroot,$SDK64_DIR $ARCHES64 $MINVER64 -Wl,-headerpad_max_install_names" \
CC=$CC
echo "----Building 64 bit libcurl..."
make -j `sysctl -n hw.logicalcpu_max`
make
echo "----Appending 64 bit framework to 32 bit framework..."
cp lib/.libs/libcurl.dylib libcurl.framework/${FRAMEWORK_VERSION}/libcurl64
install_name_tool -id @rpath/libcurl.framework/${FRAMEWORK_VERSION}/libcurl libcurl.framework/${FRAMEWORK_VERSION}/libcurl64
install_name_tool -id @executable_path/../Frameworks/libcurl.framework/${FRAMEWORK_VERSION}/libcurl libcurl.framework/${FRAMEWORK_VERSION}/libcurl64
cp libcurl.framework/${FRAMEWORK_VERSION}/libcurl libcurl.framework/${FRAMEWORK_VERSION}/libcurl32
pwd
lipo libcurl.framework/${FRAMEWORK_VERSION}/libcurl32 libcurl.framework/${FRAMEWORK_VERSION}/libcurl64 -create -output libcurl.framework/${FRAMEWORK_VERSION}/libcurl

Makefile.am

@@ -5,11 +5,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@@ -24,141 +24,28 @@ AUTOMAKE_OPTIONS = foreign
ACLOCAL_AMFLAGS = -I m4
CMAKE_DIST = CMakeLists.txt CMake/CMakeConfigurableFile.in \
CMake/CurlTests.c CMake/FindGSS.cmake CMake/OtherTests.cmake \
CMake/Platforms/WindowsCache.cmake CMake/Utilities.cmake \
include/curl/curlbuild.h.cmake CMake/Macros.cmake
VC6_LIBTMPL = projects/Windows/VC6/lib/libcurl.tmpl
VC6_LIBDSP = projects/Windows/VC6/lib/libcurl.dsp.dist
VC6_LIBDSP_DEPS = $(VC6_LIBTMPL) Makefile.am lib/Makefile.inc
VC6_SRCTMPL = projects/Windows/VC6/src/curl.tmpl
VC6_SRCDSP = projects/Windows/VC6/src/curl.dsp.dist
VC6_SRCDSP_DEPS = $(VC6_SRCTMPL) Makefile.am src/Makefile.inc
VC7_LIBTMPL = projects/Windows/VC7/lib/libcurl.tmpl
VC7_LIBVCPROJ = projects/Windows/VC7/lib/libcurl.vcproj.dist
VC7_LIBVCPROJ_DEPS = $(VC7_LIBTMPL) Makefile.am lib/Makefile.inc
VC7_SRCTMPL = projects/Windows/VC7/src/curl.tmpl
VC7_SRCVCPROJ = projects/Windows/VC7/src/curl.vcproj.dist
VC7_SRCVCPROJ_DEPS = $(VC7_SRCTMPL) Makefile.am src/Makefile.inc
VC71_LIBTMPL = projects/Windows/VC7.1/lib/libcurl.tmpl
VC71_LIBVCPROJ = projects/Windows/VC7.1/lib/libcurl.vcproj.dist
VC71_LIBVCPROJ_DEPS = $(VC71_LIBTMPL) Makefile.am lib/Makefile.inc
VC71_SRCTMPL = projects/Windows/VC7.1/src/curl.tmpl
VC71_SRCVCPROJ = projects/Windows/VC7.1/src/curl.vcproj.dist
VC71_SRCVCPROJ_DEPS = $(VC71_SRCTMPL) Makefile.am src/Makefile.inc
VC8_LIBTMPL = projects/Windows/VC8/lib/libcurl.tmpl
VC8_LIBVCPROJ = projects/Windows/VC8/lib/libcurl.vcproj.dist
VC8_LIBVCPROJ_DEPS = $(VC8_LIBTMPL) Makefile.am lib/Makefile.inc
VC8_SRCTMPL = projects/Windows/VC8/src/curl.tmpl
VC8_SRCVCPROJ = projects/Windows/VC8/src/curl.vcproj.dist
VC8_SRCVCPROJ_DEPS = $(VC8_SRCTMPL) Makefile.am src/Makefile.inc
VC9_LIBTMPL = projects/Windows/VC9/lib/libcurl.tmpl
VC9_LIBVCPROJ = projects/Windows/VC9/lib/libcurl.vcproj.dist
VC9_LIBVCPROJ_DEPS = $(VC9_LIBTMPL) Makefile.am lib/Makefile.inc
VC9_SRCTMPL = projects/Windows/VC9/src/curl.tmpl
VC9_SRCVCPROJ = projects/Windows/VC9/src/curl.vcproj.dist
VC9_SRCVCPROJ_DEPS = $(VC9_SRCTMPL) Makefile.am src/Makefile.inc
VC10_LIBTMPL = projects/Windows/VC10/lib/libcurl.tmpl
VC10_LIBVCXPROJ = projects/Windows/VC10/lib/libcurl.vcxproj.dist
VC10_LIBVCXPROJ_DEPS = $(VC10_LIBTMPL) Makefile.am lib/Makefile.inc
VC10_SRCTMPL = projects/Windows/VC10/src/curl.tmpl
VC10_SRCVCXPROJ = projects/Windows/VC10/src/curl.vcxproj.dist
VC10_SRCVCXPROJ_DEPS = $(VC10_SRCTMPL) Makefile.am src/Makefile.inc
VC11_LIBTMPL = projects/Windows/VC11/lib/libcurl.tmpl
VC11_LIBVCXPROJ = projects/Windows/VC11/lib/libcurl.vcxproj.dist
VC11_LIBVCXPROJ_DEPS = $(VC11_LIBTMPL) Makefile.am lib/Makefile.inc
VC11_SRCTMPL = projects/Windows/VC11/src/curl.tmpl
VC11_SRCVCXPROJ = projects/Windows/VC11/src/curl.vcxproj.dist
VC11_SRCVCXPROJ_DEPS = $(VC11_SRCTMPL) Makefile.am src/Makefile.inc
VC12_LIBTMPL = projects/Windows/VC12/lib/libcurl.tmpl
VC12_LIBVCXPROJ = projects/Windows/VC12/lib/libcurl.vcxproj.dist
VC12_LIBVCXPROJ_DEPS = $(VC12_LIBTMPL) Makefile.am lib/Makefile.inc
VC12_SRCTMPL = projects/Windows/VC12/src/curl.tmpl
VC12_SRCVCXPROJ = projects/Windows/VC12/src/curl.vcxproj.dist
VC12_SRCVCXPROJ_DEPS = $(VC12_SRCTMPL) Makefile.am src/Makefile.inc
VC14_LIBTMPL = projects/Windows/VC14/lib/libcurl.tmpl
VC14_LIBVCXPROJ = projects/Windows/VC14/lib/libcurl.vcxproj.dist
VC14_LIBVCXPROJ_DEPS = $(VC14_LIBTMPL) Makefile.am lib/Makefile.inc
VC14_SRCTMPL = projects/Windows/VC14/src/curl.tmpl
VC14_SRCVCXPROJ = projects/Windows/VC14/src/curl.vcxproj.dist
VC14_SRCVCXPROJ_DEPS = $(VC14_SRCTMPL) Makefile.am src/Makefile.inc
VC_DIST = projects/README \
projects/build-openssl.bat \
projects/build-wolfssl.bat \
projects/checksrc.bat \
projects/Windows/VC6/curl-all.dsw \
projects/Windows/VC6/lib/libcurl.dsw \
projects/Windows/VC6/src/curl.dsw \
projects/Windows/VC7/curl-all.sln \
projects/Windows/VC7/lib/libcurl.sln \
projects/Windows/VC7/src/curl.sln \
projects/Windows/VC7.1/curl-all.sln \
projects/Windows/VC7.1/lib/libcurl.sln \
projects/Windows/VC7.1/src/curl.sln \
projects/Windows/VC8/curl-all.sln \
projects/Windows/VC8/lib/libcurl.sln \
projects/Windows/VC8/src/curl.sln \
projects/Windows/VC9/curl-all.sln \
projects/Windows/VC9/lib/libcurl.sln \
projects/Windows/VC9/src/curl.sln \
projects/Windows/VC10/curl-all.sln \
projects/Windows/VC10/lib/libcurl.sln \
projects/Windows/VC10/lib/libcurl.vcxproj.filters \
projects/Windows/VC10/src/curl.sln \
projects/Windows/VC10/src/curl.vcxproj.filters \
projects/Windows/VC11/curl-all.sln \
projects/Windows/VC11/lib/libcurl.sln \
projects/Windows/VC11/lib/libcurl.vcxproj.filters \
projects/Windows/VC11/src/curl.sln \
projects/Windows/VC11/src/curl.vcxproj.filters \
projects/Windows/VC12/curl-all.sln \
projects/Windows/VC12/lib/libcurl.sln \
projects/Windows/VC12/lib/libcurl.vcxproj.filters \
projects/Windows/VC12/src/curl.sln \
projects/Windows/VC12/src/curl.vcxproj.filters \
projects/Windows/VC14/curl-all.sln \
projects/Windows/VC14/lib/libcurl.sln \
projects/Windows/VC14/lib/libcurl.vcxproj.filters \
projects/Windows/VC14/src/curl.sln \
projects/Windows/VC14/src/curl.vcxproj.filters
CMAKE_DIST = CMakeLists.txt CMake/CMakeConfigurableFile.in \
CMake/CurlCheckCSourceCompiles.cmake CMake/CurlCheckCSourceRuns.cmake \
CMake/CurlTests.c CMake/FindOpenSSL.cmake CMake/FindZLIB.cmake \
CMake/OtherTests.cmake CMake/Platforms/WindowsCache.cmake \
CMake/Utilities.cmake include/curl/curlbuild.h.cmake
WINBUILD_DIST = winbuild/BUILD.WINDOWS.txt winbuild/gen_resp_file.bat \
winbuild/MakefileBuild.vc winbuild/Makefile.vc \
winbuild/Makefile.msvc.names
winbuild/MakefileBuild.vc winbuild/Makefile.vc
EXTRA_DIST = CHANGES COPYING maketgz Makefile.dist curl-config.in \
RELEASE-NOTES buildconf libcurl.pc.in MacOSX-Framework scripts/zsh.pl \
$(CMAKE_DIST) $(VC_DIST) $(WINBUILD_DIST) lib/libcurl.vers.in \
buildconf.bat
CLEANFILES = $(VC6_LIBDSP) $(VC6_SRCDSP) $(VC7_LIBVCPROJ) $(VC7_SRCVCPROJ) \
$(VC71_LIBVCPROJ) $(VC71_SRCVCPROJ) $(VC8_LIBVCPROJ) $(VC8_SRCVCPROJ) \
$(VC9_LIBVCPROJ) $(VC9_SRCVCPROJ) $(VC10_LIBVCXPROJ) $(VC10_SRCVCXPROJ) \
$(VC11_LIBVCXPROJ) $(VC11_SRCVCXPROJ) $(VC12_LIBVCXPROJ) $(VC12_SRCVCXPROJ) \
$(VC14_LIBVCXPROJ) $(VC14_SRCVCXPROJ)
curl-style.el sample.emacs RELEASE-NOTES buildconf \
libcurl.pc.in vc6curl.dsw MacOSX-Framework Android.mk $(CMAKE_DIST) \
Makefile.msvc.names $(WINBUILD_DIST)
bin_SCRIPTS = curl-config
SUBDIRS = lib src include scripts
DIST_SUBDIRS = $(SUBDIRS) tests packages docs
SUBDIRS = lib src
DIST_SUBDIRS = $(SUBDIRS) tests include packages docs
pkgconfigdir = $(libdir)/pkgconfig
pkgconfig_DATA = libcurl.pc
# List of files required to generate VC IDE .dsp, .vcproj and .vcxproj files
include lib/Makefile.inc
include src/Makefile.inc
dist-hook:
rm -rf $(top_builddir)/tests/log
find $(distdir) -name "*.dist" -exec rm {} \;
@ -169,12 +56,12 @@ dist-hook:
done)
html:
cd docs && make html
cd docs; make html
pdf:
cd docs && make pdf
cd docs; make pdf
check: test examples check-docs
check: test examples
if CROSSCOMPILING
test-full: test
@ -194,24 +81,14 @@ test-full:
test-torture:
@(cd tests; $(MAKE) all torture-test)
test-am:
@(cd tests; $(MAKE) all am-test)
endif
examples:
@(cd docs/examples; $(MAKE) check)
check-docs:
@(cd docs/libcurl; $(MAKE) check)
# This is a hook to have 'make clean' also clean up the docs and the tests
# dir. The extra check for the Makefiles being present is necessary because
# 'make distcheck' will make clean first in these directories _before_ it runs
# this hook.
clean-local:
@(if test -f tests/Makefile; then cd tests; $(MAKE) clean; fi)
@(if test -f docs/Makefile; then cd docs; $(MAKE) clean; fi)
@(cd tests; $(MAKE) clean)
@(cd docs; $(MAKE) clean)
#
# Build source and binary rpms. For rpm-3.0 and above, the ~/.rpmmacros
@ -272,301 +149,9 @@ uninstall-hook:
cd docs && $(MAKE) uninstall
ca-bundle: lib/mk-ca-bundle.pl
@echo "generating a fresh ca-bundle.crt"
@echo "generate a fresh ca-bundle.crt"
@perl $< -b -l -u lib/ca-bundle.crt
ca-firefox: lib/firefox-db2pem.sh
@echo "generating a fresh ca-bundle.crt"
@echo "generate a fresh ca-bundle.crt"
./lib/firefox-db2pem.sh lib/ca-bundle.crt
checksrc:
cd lib && $(MAKE) checksrc
cd src && $(MAKE) checksrc
.PHONY: vc-ide
vc-ide: $(VC6_LIBDSP_DEPS) $(VC6_SRCDSP_DEPS) $(VC7_LIBVCPROJ_DEPS) \
$(VC7_SRCVCPROJ_DEPS) $(VC71_LIBVCPROJ_DEPS) $(VC71_SRCVCPROJ_DEPS) \
$(VC8_LIBVCPROJ_DEPS) $(VC8_SRCVCPROJ_DEPS) $(VC9_LIBVCPROJ_DEPS) \
$(VC9_SRCVCPROJ_DEPS) $(VC10_LIBVCXPROJ_DEPS) $(VC10_SRCVCXPROJ_DEPS) \
$(VC11_LIBVCXPROJ_DEPS) $(VC11_SRCVCXPROJ_DEPS) $(VC12_LIBVCXPROJ_DEPS) \
$(VC12_SRCVCXPROJ_DEPS) $(VC14_LIBVCXPROJ_DEPS) $(VC14_SRCVCXPROJ_DEPS)
@(win32_lib_srcs='$(LIB_CFILES)'; \
win32_lib_hdrs='$(LIB_HFILES) config-win32.h'; \
win32_lib_rc='$(LIB_RCFILES)'; \
win32_lib_vtls_srcs='$(LIB_VTLS_CFILES)'; \
win32_lib_vtls_hdrs='$(LIB_VTLS_HFILES)'; \
win32_src_srcs='$(CURL_CFILES)'; \
win32_src_hdrs='$(CURL_HFILES)'; \
win32_src_rc='$(CURL_RCFILES)'; \
win32_src_x_srcs='$(CURLX_CFILES)'; \
win32_src_x_hdrs='$(CURLX_HFILES) ../lib/config-win32.h'; \
\
sorted_lib_srcs=`for file in $$win32_lib_srcs; do echo $$file; done | sort`; \
sorted_lib_hdrs=`for file in $$win32_lib_hdrs; do echo $$file; done | sort`; \
sorted_lib_vtls_srcs=`for file in $$win32_lib_vtls_srcs; do echo $$file; done | sort`; \
sorted_lib_vtls_hdrs=`for file in $$win32_lib_vtls_hdrs; do echo $$file; done | sort`; \
sorted_src_srcs=`for file in $$win32_src_srcs; do echo $$file; done | sort`; \
sorted_src_hdrs=`for file in $$win32_src_hdrs; do echo $$file; done | sort`; \
sorted_src_x_srcs=`for file in $$win32_src_x_srcs; do echo $$file; done | sort`; \
sorted_src_x_hdrs=`for file in $$win32_src_x_hdrs; do echo $$file; done | sort`; \
\
awk_code='\
function gen_element(type, dir, file)\
{\
sub(/vtls\//, "", file);\
\
spaces=" ";\
if(dir == "lib\\vtls")\
tabs=" ";\
else\
tabs=" ";\
\
if(type == "dsp") {\
printf("# Begin Source File\r\n");\
printf("\r\n");\
printf("SOURCE=..\\..\\..\\..\\%s\\%s\r\n", dir, file);\
printf("# End Source File\r\n");\
}\
else if(type == "vcproj1") {\
printf("%s<File\r\n", tabs);\
printf("%s RelativePath=\"..\\..\\..\\..\\%s\\%s\">\r\n",\
tabs, dir, file);\
printf("%s</File>\r\n", tabs);\
}\
else if(type == "vcproj2") {\
printf("%s<File\r\n", tabs);\
printf("%s RelativePath=\"..\\..\\..\\..\\%s\\%s\"\r\n",\
tabs, dir, file);\
printf("%s>\r\n", tabs);\
printf("%s</File>\r\n", tabs);\
}\
else if(type == "vcxproj") {\
i = index(file, ".");\
ext = substr(file, i == 0 ? 0 : i + 1);\
\
if(ext == "c")\
printf("%s<ClCompile Include=\"..\\..\\..\\..\\%s\\%s\" />\r\n",\
spaces, dir, file);\
else if(ext == "h")\
printf("%s<ClInclude Include=\"..\\..\\..\\..\\%s\\%s\" />\r\n",\
spaces, dir, file);\
else if(ext == "rc")\
printf("%s<ResourceCompile Include=\"..\\..\\..\\..\\%s\\%s\" />\r\n",\
spaces, dir, file);\
}\
}\
\
{\
\
if($$0 == "CURL_LIB_C_FILES") {\
split(lib_srcs, arr);\
for(val in arr) gen_element(proj_type, "lib", arr[val]);\
}\
else if($$0 == "CURL_LIB_H_FILES") {\
split(lib_hdrs, arr);\
for(val in arr) gen_element(proj_type, "lib", arr[val]);\
}\
else if($$0 == "CURL_LIB_RC_FILES") {\
split(lib_rc, arr);\
for(val in arr) gen_element(proj_type, "lib", arr[val]);\
}\
else if($$0 == "CURL_LIB_VTLS_C_FILES") {\
split(lib_vtls_srcs, arr);\
for(val in arr) gen_element(proj_type, "lib\\vtls", arr[val]);\
}\
else if($$0 == "CURL_LIB_VTLS_H_FILES") {\
split(lib_vtls_hdrs, arr);\
for(val in arr) gen_element(proj_type, "lib\\vtls", arr[val]);\
}\
else if($$0 == "CURL_SRC_C_FILES") {\
split(src_srcs, arr);\
for(val in arr) gen_element(proj_type, "src", arr[val]);\
}\
else if($$0 == "CURL_SRC_H_FILES") {\
split(src_hdrs, arr);\
for(val in arr) gen_element(proj_type, "src", arr[val]);\
}\
else if($$0 == "CURL_SRC_RC_FILES") {\
split(src_rc, arr);\
for(val in arr) gen_element(proj_type, "src", arr[val]);\
}\
else if($$0 == "CURL_SRC_X_C_FILES") {\
split(src_x_srcs, arr);\
for(val in arr) {\
sub(/..\/lib\//, "", arr[val]);\
gen_element(proj_type, "lib", arr[val]);\
}\
}\
else if($$0 == "CURL_SRC_X_H_FILES") {\
split(src_x_hdrs, arr);\
for(val in arr) {\
sub(/..\/lib\//, "", arr[val]);\
gen_element(proj_type, "lib", arr[val]);\
}\
}\
else\
printf("%s\r\n", $$0);\
}';\
\
echo "generating '$(VC6_LIBDSP)'"; \
awk -v proj_type=dsp \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC6_LIBTMPL) > $(VC6_LIBDSP) || { exit 1; }; \
\
echo "generating '$(VC6_SRCDSP)'"; \
awk -v proj_type=dsp \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC6_SRCTMPL) > $(VC6_SRCDSP) || { exit 1; }; \
\
echo "generating '$(VC7_LIBVCPROJ)'"; \
awk -v proj_type=vcproj1 \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC7_LIBTMPL) > $(VC7_LIBVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC7_SRCVCPROJ)'"; \
awk -v proj_type=vcproj1 \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC7_SRCTMPL) > $(VC7_SRCVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC71_LIBVCPROJ)'"; \
awk -v proj_type=vcproj1 \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC71_LIBTMPL) > $(VC71_LIBVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC71_SRCVCPROJ)'"; \
awk -v proj_type=vcproj1 \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC71_SRCTMPL) > $(VC71_SRCVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC8_LIBVCPROJ)'"; \
awk -v proj_type=vcproj2 \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC8_LIBTMPL) > $(VC8_LIBVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC8_SRCVCPROJ)'"; \
awk -v proj_type=vcproj2 \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC8_SRCTMPL) > $(VC8_SRCVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC9_LIBVCPROJ)'"; \
awk -v proj_type=vcproj2 \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC9_LIBTMPL) > $(VC9_LIBVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC9_SRCVCPROJ)'"; \
awk -v proj_type=vcproj2 \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC9_SRCTMPL) > $(VC9_SRCVCPROJ) || { exit 1; }; \
\
echo "generating '$(VC10_LIBVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC10_LIBTMPL) > $(VC10_LIBVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC10_SRCVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC10_SRCTMPL) > $(VC10_SRCVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC11_LIBVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC11_LIBTMPL) > $(VC11_LIBVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC11_SRCVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC11_SRCTMPL) > $(VC11_SRCVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC12_LIBVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC12_LIBTMPL) > $(VC12_LIBVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC12_SRCVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC12_SRCTMPL) > $(VC12_SRCVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC14_LIBVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v lib_srcs="$$sorted_lib_srcs" \
-v lib_hdrs="$$sorted_lib_hdrs" \
-v lib_rc="$$win32_lib_rc" \
-v lib_vtls_srcs="$$sorted_lib_vtls_srcs" \
-v lib_vtls_hdrs="$$sorted_lib_vtls_hdrs" \
"$$awk_code" $(srcdir)/$(VC14_LIBTMPL) > $(VC14_LIBVCXPROJ) || { exit 1; }; \
\
echo "generating '$(VC14_SRCVCXPROJ)'"; \
awk -v proj_type=vcxproj \
-v src_srcs="$$sorted_src_srcs" \
-v src_hdrs="$$sorted_src_hdrs" \
-v src_rc="$$win32_src_rc" \
-v src_x_srcs="$$sorted_src_x_srcs" \
-v src_x_hdrs="$$sorted_src_x_hdrs" \
"$$awk_code" $(srcdir)/$(VC14_SRCTMPL) > $(VC14_SRCVCXPROJ) || { exit 1; };)


@ -5,11 +5,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2010, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -70,22 +70,29 @@ mingw32:
$(MAKE) -C lib -f Makefile.m32
$(MAKE) -C src -f Makefile.m32
mingw32-zlib:
$(MAKE) -C lib -f Makefile.m32 ZLIB=1
$(MAKE) -C src -f Makefile.m32 ZLIB=1
mingw32-ssl-zlib:
$(MAKE) -C lib -f Makefile.m32 SSL=1 ZLIB=1
$(MAKE) -C src -f Makefile.m32 SSL=1 ZLIB=1
mingw32-ssh2-ssl-zlib:
$(MAKE) -C lib -f Makefile.m32 SSH2=1 SSL=1 ZLIB=1
$(MAKE) -C src -f Makefile.m32 SSH2=1 SSL=1 ZLIB=1
mingw32-ssh2-ssl-sspi-zlib:
$(MAKE) -C lib -f Makefile.m32 SSH2=1 SSL=1 SSPI=1 ZLIB=1
$(MAKE) -C src -f Makefile.m32 SSH2=1 SSL=1 SSPI=1 ZLIB=1
mingw32-rtmp-ssh2-ssl-sspi-zlib:
$(MAKE) -C lib -f Makefile.m32 RTMP=1 SSH2=1 SSL=1 SSPI=1 ZLIB=1
$(MAKE) -C src -f Makefile.m32 RTMP=1 SSH2=1 SSL=1 SSPI=1 ZLIB=1
mingw32-clean:
$(MAKE) -C lib -f Makefile.m32 clean
$(MAKE) -C src -f Makefile.m32 clean
$(MAKE) -C docs/examples -f Makefile.m32 clean
mingw32-vclean mingw32-distclean:
$(MAKE) -C lib -f Makefile.m32 vclean
$(MAKE) -C src -f Makefile.m32 vclean
$(MAKE) -C docs/examples -f Makefile.m32 vclean
mingw32-examples%:
$(MAKE) -C docs/examples -f Makefile.m32 CFG=$@
mingw32%:
$(MAKE) -C lib -f Makefile.m32 CFG=$@
$(MAKE) -C src -f Makefile.m32 CFG=$@
vc-clean: $(VC)
cd lib
@ -130,138 +137,18 @@ vc-x64: $(VC)
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release
vc-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release WINDOWS_SSPI=1
vc-x64-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release WINDOWS_SSPI=1
vc-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release USE_IDN=1
vc-x64-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release USE_IDN=1
vc-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release USE_IDN=1 WINDOWS_SSPI=1
vc-x64-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release USE_IDN=1 WINDOWS_SSPI=1
vc-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-zlib
cd ..\src
nmake /f Makefile.$(VC) cfg=release-zlib
vc-x64-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib
vc-zlib-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-zlib WINDOWS_SSPI=1
vc-x64-zlib-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib WINDOWS_SSPI=1
vc-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-zlib USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-zlib USE_IDN=1
vc-x64-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib USE_IDN=1
vc-zlib-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-x64-zlib-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-ssl: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl
vc-x64-ssl: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl
vc-ssl-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl WINDOWS_SSPI=1
vc-x64-ssl-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl WINDOWS_SSPI=1
vc-ssl-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl USE_IDN=1
vc-x64-ssl-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl USE_IDN=1
vc-ssl-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl USE_IDN=1 WINDOWS_SSPI=1
vc-x64-ssl-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl USE_IDN=1 WINDOWS_SSPI=1
vc-ssl-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-zlib
@ -274,138 +161,6 @@ vc-x64-ssl-zlib: $(VC)
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib
vc-ssl-zlib-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-zlib WINDOWS_SSPI=1
vc-x64-ssl-zlib-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib WINDOWS_SSPI=1
vc-ssl-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-zlib USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-zlib USE_IDN=1
vc-x64-ssl-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib USE_IDN=1
vc-ssl-zlib-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-x64-ssl-zlib-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-ssl-ssh2-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib
vc-x64-ssl-ssh2-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib
vc-ssl-ssh2-zlib-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib WINDOWS_SSPI=1
vc-x64-ssl-ssh2-zlib-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib WINDOWS_SSPI=1
vc-ssl-ssh2-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib USE_IDN=1
vc-x64-ssl-ssh2-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib USE_IDN=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib USE_IDN=1
vc-ssl-ssh2-zlib-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-ssl-ssh2-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-x64-ssl-ssh2-zlib-idn-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-ssl-ssh2-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-winssl: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-winssl WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-winssl WINDOWS_SSPI=1
vc-x64-winssl: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl WINDOWS_SSPI=1
vc-winssl-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-winssl USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-winssl USE_IDN=1 WINDOWS_SSPI=1
vc-x64-winssl-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl USE_IDN=1 WINDOWS_SSPI=1
vc-winssl-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-winssl-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-winssl-zlib WINDOWS_SSPI=1
vc-x64-winssl-zlib: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl-zlib WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl-zlib WINDOWS_SSPI=1
vc-winssl-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-winssl-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release-winssl-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-x64-winssl-zlib-idn: $(VC)
cd lib
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl-zlib USE_IDN=1 WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) MACHINE=x64 cfg=release-winssl-zlib USE_IDN=1 WINDOWS_SSPI=1
vc-ssl-dll: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release-ssl-dll
@ -448,6 +203,12 @@ vc-zlib-dll: $(VC)
cd ..\src
nmake /f Makefile.$(VC) cfg=release-zlib-dll
vc-sspi: $(VC)
cd lib
nmake /f Makefile.$(VC) cfg=release WINDOWS_SSPI=1
cd ..\src
nmake /f Makefile.$(VC) cfg=release WINDOWS_SSPI=1
djgpp:
$(MAKE) -C lib -f Makefile.dj
$(MAKE) -C src -f Makefile.dj
@ -468,27 +229,34 @@ netware:
$(MAKE) -C lib -f Makefile.netware
$(MAKE) -C src -f Makefile.netware
netware-ares:
$(MAKE) -C lib -f Makefile.netware WITH_ARES=1
$(MAKE) -C src -f Makefile.netware WITH_ARES=1
netware-ssl:
$(MAKE) -C lib -f Makefile.netware WITH_SSL=1
$(MAKE) -C src -f Makefile.netware WITH_SSL=1
netware-ssl-zlib:
$(MAKE) -C lib -f Makefile.netware WITH_SSL=1 WITH_ZLIB=1
$(MAKE) -C src -f Makefile.netware WITH_SSL=1 WITH_ZLIB=1
netware-ssh2-ssl-zlib:
$(MAKE) -C lib -f Makefile.netware WITH_SSH2=1 WITH_SSL=1 WITH_ZLIB=1
$(MAKE) -C src -f Makefile.netware WITH_SSH2=1 WITH_SSL=1 WITH_ZLIB=1
netware-zlib:
$(MAKE) -C lib -f Makefile.netware WITH_ZLIB=1
$(MAKE) -C src -f Makefile.netware WITH_ZLIB=1
netware-clean:
$(MAKE) -C lib -f Makefile.netware clean
$(MAKE) -C src -f Makefile.netware clean
$(MAKE) -C docs/examples -f Makefile.netware clean
netware-vclean netware-distclean:
$(MAKE) -C lib -f Makefile.netware vclean
$(MAKE) -C src -f Makefile.netware vclean
$(MAKE) -C docs/examples -f Makefile.netware vclean
netware-install:
$(MAKE) -C lib -f Makefile.netware install
$(MAKE) -C src -f Makefile.netware install
netware-examples-%:
$(MAKE) -C docs/examples -f Makefile.netware CFG=$@
netware-%:
$(MAKE) -C lib -f Makefile.netware CFG=$@
$(MAKE) -C src -f Makefile.netware CFG=$@
unix: all
unix-ssl: ssl
@ -500,18 +268,6 @@ linux-ssl: ssl
# We don't need to do anything for vc6.
vc6:
# VC7 makefiles are for use with VS.NET and VS.NET 2003
vc7: lib/Makefile.vc7 src/Makefile.vc7
lib/Makefile.vc7: lib/Makefile.vc6
@echo "generate $@"
@sed -e "s/VC6/VC7/g" lib/Makefile.vc6 > lib/Makefile.vc7
src/Makefile.vc7: src/Makefile.vc6
@echo "generate $@"
@sed -e "s/VC6/VC7/g" src/Makefile.vc6 > src/Makefile.vc7
# VC8 makefiles are for use with VS2005
vc8: lib/Makefile.vc8 src/Makefile.vc8
lib/Makefile.vc8: lib/Makefile.vc6
@ -544,39 +300,6 @@ src/Makefile.vc10: src/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc10/g" -e "s/VC6/VC10/g" src/Makefile.vc6 > src/Makefile.vc10
# VC11 makefiles are for use with VS2012
vc11: lib/Makefile.vc11 src/Makefile.vc11
lib/Makefile.vc11: lib/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc11/g" -e "s/VC6/VC11/g" lib/Makefile.vc6 > lib/Makefile.vc11
src/Makefile.vc11: src/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc11/g" -e "s/VC6/VC11/g" src/Makefile.vc6 > src/Makefile.vc11
# VC12 makefiles are for use with VS2013
vc12: lib/Makefile.vc12 src/Makefile.vc12
lib/Makefile.vc12: lib/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc12/g" -e "s/VC6/VC12/g" lib/Makefile.vc6 > lib/Makefile.vc12
src/Makefile.vc12: src/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc12/g" -e "s/VC6/VC12/g" src/Makefile.vc6 > src/Makefile.vc12
# VC14 makefiles are for use with VS2015
vc14: lib/Makefile.vc14 src/Makefile.vc14
lib/Makefile.vc14: lib/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc14/g" -e "s/VC6/VC14/g" lib/Makefile.vc6 > lib/Makefile.vc14
src/Makefile.vc14: src/Makefile.vc6
@echo "generate $@"
@sed -e "s#/GX /DWIN32 /YX#/EHsc /DWIN32#" -e "s#/GZ#/RTC1#" -e "s/ws2_32.lib/ws2_32.lib/g" -e "s/vc6/vc14/g" -e "s/VC6/VC14/g" src/Makefile.vc6 > src/Makefile.vc14
ca-bundle: lib/mk-ca-bundle.pl
@echo "generate a fresh ca-bundle.crt"
@perl $< -b -l -u lib/ca-bundle.crt


@ -9,7 +9,7 @@
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is

README

@ -24,7 +24,7 @@ README
CONTACT
If you have problems, questions, ideas or suggestions, please contact us
by posting to a suitable mailing list. See https://curl.haxx.se/mail/
by posting to a suitable mailing list. See http://curl.haxx.se/mail/
All contributors to the project are listed in the THANKS document.
@ -32,18 +32,18 @@ WEB SITE
Visit the curl web site for the latest news and downloads:
https://curl.haxx.se/
http://curl.haxx.se/
GIT
To download the very latest source off the GIT server do this:
git clone https://github.com/curl/curl.git
git clone git://github.com/bagder/curl.git
(you'll get a directory named curl created, filled with the source code)
NOTICE
Curl contains pieces of source code that is Copyright (c) 1998, 1999
Kungliga Tekniska Högskolan. This notice is included here to comply with the
Kungliga Tekniska Högskolan. This notice is included here to comply with the
distribution terms.


@ -1,45 +0,0 @@
Curl is a command line tool for transferring data specified with URL
syntax. Find out how to use curl by reading [the curl.1 man
page](https://curl.haxx.se/docs/manpage.html) or [the MANUAL
document](https://curl.haxx.se/docs/manual.html). Find out how to install Curl
by reading [the INSTALL document](https://curl.haxx.se/docs/install.html).
libcurl is the library curl is using to do its job. It is readily available to
be used by your software. Read [the libcurl.3 man
page](https://curl.haxx.se/libcurl/c/libcurl.html) to learn how!
You find answers to the most frequent questions we get in [the FAQ
document](https://curl.haxx.se/docs/faq.html).
Study [the COPYING file](https://curl.haxx.se/docs/copyright.html) for
distribution terms and similar. If you distribute curl binaries or other
binaries that involve libcurl, you might enjoy [the LICENSE-MIXING
document](https://curl.haxx.se/legal/licmix.html).
## CONTACT
If you have problems, questions, ideas or suggestions, please contact us by
posting to a suitable [mailing list](https://curl.haxx.se/mail/).
All contributors to the project are listed in [the THANKS
document](https://curl.haxx.se/docs/thanks.html).
## WEB SITE
Visit the [curl web site](https://curl.haxx.se/) for the latest news and
downloads.
## GIT
To download the very latest source off the GIT server do this:
git clone https://github.com/curl/curl.git
(you'll get a directory named curl created, filled with the source code)
## NOTICE
Curl contains pieces of source code that is Copyright (c) 1998, 1999 Kungliga
Tekniska Högskolan. This notice is included here to comply with the
distribution terms.


@ -1,159 +1,81 @@
Curl and libcurl 7.48.0
Curl and libcurl 7.21.5
Public curl releases: 153
Command line options: 179
curl_easy_setopt() options: 221
Public functions in libcurl: 61
Contributors: 1364
Public curl releases: 121
Command line options: 143
curl_easy_setopt() options: 185
Public functions in libcurl: 58
Known libcurl bindings: 39
Contributors: 854
This release includes the following changes:
o configure: --with-ca-fallback: use built-in TLS CA fallback [2]
o TFTP: add --tftp-no-options to expose CURLOPT_TFTP_NO_OPTIONS [22]
o getinfo: CURLINFO_TLS_SSL_PTR supersedes CURLINFO_TLS_SESSION [25]
o added CODE_STYLE.md [47]
o SOCKOPTFUNCTION: callback can say already-connected
o Added --netrc-file
o Added (new) support for cyassl
o TLS-SRP: enabled with OpenSSL
o Added CURLE_NOT_BUILT_IN and CURLE_UNKNOWN_OPTION
This release includes the following bugfixes:
o Proxy-Connection: stop sending this header by default [1]
o os400: sync ILE/RPG definitions with latest public header files
o cookies: allow spaces in cookie names, cut of trailing spaces [3]
o tool_urlglob: Allow reserved dos device names (Windows) [4]
o openssl: remove most BoringSSL #ifdefs [5]
o tool_doswin: Support for literal path prefix \\?\
o mbedtls: fix ALPN usage segfault [6]
o mbedtls: fix memory leak when destroying SSL connection data [7]
o nss: do not count enabled cipher-suites
o examples/cookie_interface.c: add cleanup call
o examples: adhere to curl code style
o curlx_tvdiff: handle 32bit time_t overflows [8]
o dist: ship buildconf.bat too
o curl.1: --disable-{eprt,epsv} are ignored for IPv6 hosts [9]
o generate.bat: Fix comment bug by removing old comments [10]
o test1604: Add to Makefile.inc so it gets run
o gtls: fix for builds lacking encrypted key file support [11]
o SCP: use libssh2_scp_recv2 to support > 2GB files on windows [12]
o CURLOPT_CONNECTTIMEOUT_MS.3: Fix example to use milliseconds option [13]
o cookie: do not refuse cookies to localhost [14]
o openssl: avoid direct PKEY access with OpenSSL 1.1.0 [15]
o http: Don't break the header into chunks if HTTP/2 [16]
o http2: don't decompress gzip decoding automatically [17]
o curlx.c: i2s_ASN1_IA5STRING() clashes with an openssl function
o curl.1: add a missing dash
o curl.1: HTTP headers for --cookie must be Set-Cookie style [18]
o CURLOPT_COOKIEFILE.3: HTTP headers must be Set-Cookie style [18]
o curl_sasl: Fix memory leak in digest parser [19]
o src/Makefile.m32: add CURL_{LD,C}FLAGS_EXTRAS support [20]
o CURLOPT_DEBUGFUNCTION.3: Fix example
o runtests: Fixed usage of %PWD on MinGW64 [21]
o tests/sshserver.pl: use RSA instead of DSA for host auth [23]
o multi_remove_handle: keep the timeout list until after disconnect [24]
o Curl_read: check for activated HTTP/1 pipelining, not only requested
o configure: warn on invalid ca bundle or path [26]
o file: try reading from files with no size [27]
o getinfo: Add support for mbedTLS TLS session info
o formpost: fix memory leaks in AddFormData error branches [28]
o makefile.m32: allow to pass .dll/.exe-specific LDFLAGS [29]
o url: if Curl_done is premature then pipeline not in use [30]
o cookie: remove redundant check [31]
o cookie: Don't expire session cookies in remove_expired [32]
o makefile.m32: fix to allow -ssh2-winssl combination [33]
o checksrc.bat: Fixed cannot find perl if installed but not in path
o build-openssl.bat: Fixed cannot find perl if installed but not in path
o mbedtls: fix user-specified SSL protocol version
o makefile.m32: add missing libs for static -winssl-ssh2 builds [34]
o test46: change cookie expiry date [35]
o pipeline: Sanity check pipeline pointer before accessing it [36]
o openssl: use the correct OpenSSL/BoringSSL/LibreSSL in messages
o ftp_done: clear tunnel_state when secondary socket closes [37]
o opt-docs: fix heading macros [38]
o imap/pop3/smtp: Fixed connections upgraded with TLS are not reused [39]
o curl_multi_wait: never return -1 in 'numfds' [40]
o url.c: fix clang warning: no newline at end of file
o krb5: improved type handling to avoid clang compiler warnings
o cookies: first n/v pair in Set-Cookie: is the cookie, then parameters [41]
o multi: avoid blocking during CURLM_STATE_WAITPROXYCONNECT [42]
o multi hash: ensure modulo performed on curl_socket_t [43]
o curl: glob_range: no need to check unsigned variable for negative
o easy: add check to malloc() when running event-based
o CURLOPT_SSLENGINE.3: Only for OpenSSL built with engine support [44]
o version: thread safety
o openssl: verbose: show matching SAN pattern
o openssl: adapt to OpenSSL 1.1.0 API breakage in ERR_remove_thread_state()
o formdata.c: Fixed compilation warning
o configure: use cpp -P when needed [45]
o imap.c: Fixed compilation warning with /Wall enabled
o config-w32.h: Fixed compilation warning when /Wall enabled
o ftp/imap/pop3/smtp: Fixed compilation warning when /Wall enabled
o build: Added missing Visual Studio filter files for VC10 onwards
o easy: Remove poll failure check in easy_transfer
o mbedtls: fix compiler warning
o build-wolfssl: Update VS properties for wolfSSL v3.9.0
o Fixed various compilation warnings when verbose strings disabled
o nss: avoid memory leak on SSL connection failure
o nss: do not ignore failure of SSL handshake
o multi: better failed connect handling when using FTP, SMTP, POP3 and IMAP
o runtests.pl: fix pid number concatenation that prevented it from killing
the correct process at times
o PolarSSL: Return 0 on receiving TLS CLOSE_NOTIFY alert
o curl_easy_setopt.3: Removed wrong reference to CURLOPT_USERPASSWORD
o multi: close connection on timeout
o IMAP in multi mode does SSL connections non-blocking
o honours the --disable-ldaps configure option
o Force setopt constants written by --libcurl to be long
o ssh_connect: treat libssh2 return code better
o SFTP upload could stall the state machine when the multi_socket API was
used
o SFTP and SCP could leak memory when used with the multi interface and
the connection was closed
o Added missing file to repair the MSVC makefiles
o Fixed detection of recvfrom arguments on Android/bionic
o GSS: handle reuse fix
o transfer: avoid insane conversion of time_t
o nss: do not ignore value of CURLOPT_SSL_VERIFYPEER in certain cases
o SMTP-multi: non-blocking connect
o SFTP-multi: set cselect for sftp and scp to fix "stall" risk
o configure: removed wrongly claimed default paths
o pop3: fixed torture tests to succeed
o symbols-in-versions: many corrections
o if a HTTP request gets retried because the connection was dead, rewind if
any data was sent as part of it
o only probe for working ipv6 once and then re-use that info for further
requests
o requests that are asked to bound to a local interface/port will no longer
wrongly re-use connections that aren't
o libcurl.m4: Add missing quotes in AC_LINK_IFELSE
o progress output: don't print the last update on a separate line
o POP3: the command to send is STLS, not STARTTLS
o POP3: PASS command was not sent after upgrade to TLS
o configure: fix libtool warning
o nss: allow to use multiple client certificates for a single host
o HTTP pipelining: Fix handling of zero-length responses
o Don't list NTLM in curl-config when HTTP is disabled
o curl_easy_setopt.3: CURLOPT_RESOLVE typo version
o OpenSSL: build fine with no-sslv2 versions
o checkconnection: don't call with NULL pointer with RTSP and multi interface
o Borland makefile updates
o configure: libssh2 link fix without pkg-config
o certinfo crash
o CCC crash
This release includes the following known bugs:
o see docs/KNOWN_BUGS (https://curl.haxx.se/docs/knownbugs.html)
o see docs/KNOWN_BUGS (http://curl.haxx.se/docs/knownbugs.html)
This release would not have looked like this without help, code, reports and
advice from friends like these:
Anders Bakken, Brad Fitzpatrick, Clint Clayton, Dan Fandrich,
Daniel Stenberg, David Benjamin, David Byron, Emil Lerner, Eric S. Raymond,
Gisle Vanem, Jaime Fullaondo, Jeffrey Walton, Jesse Tan, Justin Ehlert,
Kamil Dudka, Kazuho Oku, Ludwig Nussel, Maksim Kuzevanov, Michael König,
Oliver Graute, Patrick Monnerat, Rafael Antonio, Ray Satiro, Seth Mos,
Shine Fan, Steve Holme, Tatsuhiro Tsujikawa, Timotej Lazar, Tim Rühsen,
Viktor Szakáts,
(30 contributors)
Mike Crowe, Kamil Dudka, Julien Chaffraix, Hoi-Ho Chan, Ben Noordhuis,
Dan Fandrich, Henry Ludemann, Karl M, Manuel Massing, Marcus Sundberg,
Stefan Krause, Todd A Ouska, Saqib Ali, Andre Guibert de Bruet,
Tor Arntsen, Vincent Torri, Dave Reisner, Chris Smowton, Tinus van den Berg,
Hongli Lai, Gisle Vanem, Andrei Benea, Mehmet Bozkurt
Thanks! (and sorry if I forgot to mention someone)
References to bug reports and discussions on issues:
[1] = https://curl.haxx.se/bug/?i=633
[2] = https://curl.haxx.se/bug/?i=569
[3] = https://curl.haxx.se/bug/?i=639
[4] = https://github.com/curl/curl/commit/4520534#commitcomment-15954863
[5] = https://curl.haxx.se/bug/?i=640
[6] = https://curl.haxx.se/bug/?i=642
[7] = https://curl.haxx.se/bug/?i=626
[8] = https://curl.haxx.se/bug/?i=646
[9] = https://bugzilla.redhat.com/1305970
[10] = https://curl.haxx.se/bug/?i=649
[11] = https://curl.haxx.se/bug/?i=651
[12] = https://curl.haxx.se/bug/?i=451
[13] = https://curl.haxx.se/bug/?i=653
[14] = https://curl.haxx.se/bug/?i=658
[15] = https://curl.haxx.se/bug/?i=650
[16] = https://curl.haxx.se/bug/?i=659
[17] = https://curl.haxx.se/bug/?i=661
[18] = https://curl.haxx.se/bug/?i=666
[19] = https://curl.haxx.se/bug/?i=667
[20] = https://curl.haxx.se/bug/?i=670
[21] = https://curl.haxx.se/bug/?i=672
[22] = https://curl.haxx.se/bug/?i=481
[23] = https://curl.haxx.se/bug/?i=676
[24] = https://curl.haxx.se/mail/lib-2016-02/0097.html
[25] = https://curl.haxx.se/libcurl/c/CURLINFO_TLS_SSL_PTR.html
[26] = https://curl.haxx.se/bug/?i=404
[27] = https://curl.haxx.se/bug/?i=681
[28] = https://curl.haxx.se/bug/?i=688
[29] = https://curl.haxx.se/bug/?i=689
[30] = https://curl.haxx.se/bug/?i=690
[31] = https://curl.haxx.se/bug/?i=695
[32] = https://curl.haxx.se/bug/?i=697
[33] = https://curl.haxx.se/bug/?i=692
[34] = https://curl.haxx.se/bug/?i=693
[35] = https://curl.haxx.se/bug/?i=697
[36] = https://curl.haxx.se/bug/?i=704
[37] = https://curl.haxx.se/bug/?i=701
[38] = https://curl.haxx.se/bug/?i=705
[39] = https://curl.haxx.se/bug/?i=422
[40] = https://curl.haxx.se/bug/?i=707
[41] = https://curl.haxx.se/bug/?i=709
[42] = https://curl.haxx.se/bug/?i=703
[43] = https://curl.haxx.se/bug/?i=712
[44] = https://curl.haxx.se/mail/lib-2016-03/0150.html
[45] = https://curl.haxx.se/bug/?i=719
[47] = https://curl.haxx.se/dev/code-style.html

TODO-RELEASE

@ -0,0 +1,15 @@
To be addressed in 7.21.6
=========================
284 - bug 3172608 "No re-authentication when HTTP connection is closed"
http://curl.haxx.se/bug/view.cgi?id=3172608
Would be nice if someone could verify the suggested patch
285 - bug 3163118 "Allow programmatic use of telnet on Windows"
http://curl.haxx.se/bug/view.cgi?id=3163118
Would appreciate it if a Windows developer could give it a look before we
apply the suggested patch
287 - bug 3215314 Post quote operation to rename fails in Windows
289 -


@ -5,11 +5,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -20,6 +20,7 @@
#
#***************************************************************************
dnl CURL_CHECK_DEF (SYMBOL, [INCLUDES], [SILENT])
dnl -------------------------------------------------
dnl Use the C preprocessor to find out if the given object-style symbol
@ -30,10 +31,6 @@ dnl result in a set of double-quoted strings the returned expansion will
dnl actually be a single double-quoted string concatenating all them.
AC_DEFUN([CURL_CHECK_DEF], [
AC_REQUIRE([CURL_CPP_P])dnl
OLDCPPFLAGS=$CPPFLAGS
# CPPPFLAGS comes from CURL_CPP_P
CPPFLAGS="$CPPPFLAGS"
AS_VAR_PUSHDEF([ac_HaveDef], [curl_cv_have_def_$1])dnl
AS_VAR_PUSHDEF([ac_Def], [curl_cv_def_$1])dnl
if test -z "$SED"; then
@ -54,7 +51,7 @@ CURL_DEF_TOKEN $1
],[
tmp_exp=`eval "$ac_cpp conftest.$ac_ext" 2>/dev/null | \
"$GREP" CURL_DEF_TOKEN 2>/dev/null | \
"$SED" 's/.*CURL_DEF_TOKEN[[ ]][[ ]]*//' 2>/dev/null | \
"$SED" 's/.*CURL_DEF_TOKEN[[ ]]//' 2>/dev/null | \
"$SED" 's/[["]][[ ]]*[["]]//g' 2>/dev/null`
if test -z "$tmp_exp" || test "$tmp_exp" = "$1"; then
tmp_exp=""
@ -70,7 +67,6 @@ CURL_DEF_TOKEN $1
fi
AS_VAR_POPDEF([ac_Def])dnl
AS_VAR_POPDEF([ac_HaveDef])dnl
CPPFLAGS=$OLDCPPFLAGS
])
@ -134,7 +130,7 @@ int main (void)
]])
],[
tst_lib_xnet_required="yes"
LIBS="-lxnet $LIBS"
LIBS="$LIBS -lxnet"
])
AC_MSG_RESULT([$tst_lib_xnet_required])
])
@ -154,7 +150,6 @@ AC_DEFUN([CURL_CHECK_AIX_ALL_SOURCE], [
#endif])
AC_BEFORE([$0], [AC_SYS_LARGEFILE])dnl
AC_BEFORE([$0], [CURL_CONFIGURE_REENTRANT])dnl
AC_BEFORE([$0], [CURL_CONFIGURE_PULL_SYS_POLL])dnl
AC_MSG_CHECKING([if OS is AIX (to define _ALL_SOURCE)])
AC_EGREP_CPP([yes_this_is_aix],[
#ifdef _AIX
@ -233,7 +228,12 @@ AC_DEFUN([CURL_CHECK_NATIVE_WINDOWS], [
])
fi
])
AM_CONDITIONAL(DOING_NATIVE_WINDOWS, test "x$ac_cv_native_windows" = xyes)
case "$ac_cv_native_windows" in
yes)
AC_DEFINE_UNQUOTED(NATIVE_WINDOWS, 1,
[Define to 1 if you are building a native Windows target.])
;;
esac
])
@ -1601,6 +1601,200 @@ AC_DEFUN([CURL_CHECK_FUNC_SEND], [
fi
])
dnl CURL_CHECK_FUNC_RECVFROM
dnl -------------------------------------------------
dnl Test if the socket recvfrom() function is available,
dnl and check its return type and the types of its
dnl arguments. If the function succeeds HAVE_RECVFROM
dnl will be defined, defining the types of the arguments
dnl in RECVFROM_TYPE_ARG1, RECVFROM_TYPE_ARG2, and so on
dnl to RECVFROM_TYPE_ARG6, defining also the type of the
dnl function return value in RECVFROM_TYPE_RETV.
dnl Notice that the types returned for pointer arguments
dnl will actually be the type pointed by the pointer.
AC_DEFUN([CURL_CHECK_FUNC_RECVFROM], [
AC_REQUIRE([CURL_CHECK_HEADER_WINSOCK])dnl
AC_REQUIRE([CURL_CHECK_HEADER_WINSOCK2])dnl
AC_CHECK_HEADERS(sys/types.h sys/socket.h)
#
AC_MSG_CHECKING([for recvfrom])
AC_LINK_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#else
#ifdef HAVE_WINSOCK_H
#include <winsock.h>
#endif
#endif
#else
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
#ifdef HAVE_SYS_SOCKET_H
#include <sys/socket.h>
#endif
#endif
]],[[
recvfrom(0, 0, 0, 0, 0, 0);
]])
],[
AC_MSG_RESULT([yes])
curl_cv_recvfrom="yes"
],[
AC_MSG_RESULT([no])
curl_cv_recvfrom="no"
])
#
if test "$curl_cv_recvfrom" = "yes"; then
AC_CACHE_CHECK([types of args and return type for recvfrom],
[curl_cv_func_recvfrom_args], [
curl_cv_func_recvfrom_args="unknown"
for recvfrom_retv in 'int' 'ssize_t'; do
for recvfrom_arg1 in 'int' 'ssize_t' 'SOCKET'; do
for recvfrom_arg2 in 'char *' 'void *'; do
for recvfrom_arg3 in 'size_t' 'int' 'socklen_t' 'unsigned int'; do
for recvfrom_arg4 in 'int' 'unsigned int'; do
for recvfrom_arg5 in 'const struct sockaddr *' 'struct sockaddr *' 'void *'; do
for recvfrom_arg6 in 'socklen_t *' 'int *' 'unsigned int *' 'size_t *' 'void *'; do
if test "$curl_cv_func_recvfrom_args" = "unknown"; then
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#undef inline
#ifdef HAVE_WINDOWS_H
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
#ifdef HAVE_WINSOCK2_H
#include <winsock2.h>
#else
#ifdef HAVE_WINSOCK_H
#include <winsock.h>
#endif
#endif
#define RECVFROMCALLCONV PASCAL
#else
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
#ifdef HAVE_SYS_SOCKET_H
#include <sys/socket.h>
#endif
#define RECVFROMCALLCONV
#endif
extern $recvfrom_retv RECVFROMCALLCONV
recvfrom($recvfrom_arg1, $recvfrom_arg2,
$recvfrom_arg3, $recvfrom_arg4,
$recvfrom_arg5, $recvfrom_arg6);
]],[[
$recvfrom_arg1 s=0;
$recvfrom_arg2 buf=0;
$recvfrom_arg3 len=0;
$recvfrom_arg4 flags=0;
$recvfrom_arg5 addr=0;
$recvfrom_arg6 addrlen=0;
$recvfrom_retv res=0;
res = recvfrom(s, buf, len, flags, addr, addrlen);
]])
],[
curl_cv_func_recvfrom_args="$recvfrom_arg1,$recvfrom_arg2,$recvfrom_arg3,$recvfrom_arg4,$recvfrom_arg5,$recvfrom_arg6,$recvfrom_retv"
])
fi
done
done
done
done
done
done
done
]) # AC-CACHE-CHECK
# Nearly last minute change for this release starts here
AC_DEFINE_UNQUOTED(HAVE_RECVFROM, 1,
[Define to 1 if you have the recvfrom function.])
ac_cv_func_recvfrom="yes"
# Nearly last minute change for this release ends here
if test "$curl_cv_func_recvfrom_args" = "unknown"; then
AC_MSG_WARN([Cannot find proper types to use for recvfrom args])
else
recvfrom_prev_IFS=$IFS; IFS=','
set dummy `echo "$curl_cv_func_recvfrom_args" | sed 's/\*/\*/g'`
IFS=$recvfrom_prev_IFS
shift
#
recvfrom_ptrt_arg2=$[2]
recvfrom_ptrt_arg5=$[5]
recvfrom_ptrt_arg6=$[6]
#
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG1, $[1],
[Define to the type of arg 1 for recvfrom.])
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG3, $[3],
[Define to the type of arg 3 for recvfrom.])
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG4, $[4],
[Define to the type of arg 4 for recvfrom.])
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_RETV, $[7],
[Define to the function return type for recvfrom.])
#
prev_sh_opts=$-
#
case $prev_sh_opts in
*f*)
;;
*)
set -f
;;
esac
#
recvfrom_type_arg2=`echo $recvfrom_ptrt_arg2 | sed 's/ \*//'`
recvfrom_type_arg5=`echo $recvfrom_ptrt_arg5 | sed 's/ \*//'`
recvfrom_type_arg6=`echo $recvfrom_ptrt_arg6 | sed 's/ \*//'`
#
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG2, $recvfrom_type_arg2,
[Define to the type pointed by arg 2 for recvfrom.])
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG5, $recvfrom_type_arg5,
[Define to the type pointed by arg 5 for recvfrom.])
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG6, $recvfrom_type_arg6,
[Define to the type pointed by arg 6 for recvfrom.])
#
if test "$recvfrom_type_arg2" = "void"; then
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG2_IS_VOID, 1,
[Define to 1 if the type pointed by arg 2 for recvfrom is void.])
fi
if test "$recvfrom_type_arg5" = "void"; then
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG5_IS_VOID, 1,
[Define to 1 if the type pointed by arg 5 for recvfrom is void.])
fi
if test "$recvfrom_type_arg6" = "void"; then
AC_DEFINE_UNQUOTED(RECVFROM_TYPE_ARG6_IS_VOID, 1,
[Define to 1 if the type pointed by arg 6 for recvfrom is void.])
fi
#
case $prev_sh_opts in
*f*)
;;
*)
set +f
;;
esac
#
AC_DEFINE_UNQUOTED(HAVE_RECVFROM, 1,
[Define to 1 if you have the recvfrom function.])
ac_cv_func_recvfrom="yes"
fi
else
AC_MSG_WARN([Unable to link function recvfrom])
fi
])
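As a point of reference, the probe above works by compiling a tiny C program once per candidate signature and keeping the first combination that builds cleanly. A minimal sketch of a single candidate, assuming plain POSIX types (the real macro loops over many alternatives such as SOCKET, char * and int *, and wraps everything in autoconf quoting):

/* Sketch only: one candidate recvfrom() signature of the many the probe tries. */
#include <sys/types.h>
#include <sys/socket.h>

int main(void)
{
  char buf[1];
  struct sockaddr addr;
  socklen_t addrlen = sizeof(addr);
  ssize_t res;

  /* if this compiles and links, the candidate argument types match */
  res = recvfrom(-1, buf, sizeof(buf), 0, &addr, &addrlen);
  (void)res;
  return 0;
}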
dnl CURL_CHECK_MSG_NOSIGNAL
dnl -------------------------------------------------
dnl Check for MSG_NOSIGNAL
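What this check amounts to, roughly: MSG_NOSIGNAL either is visible after including <sys/socket.h> or the test program fails to compile. An illustration of the idea only, not the macro's exact test source:

/* MSG_NOSIGNAL is a send() flag that suppresses SIGPIPE where supported;
   merely referencing it is enough to detect its presence at compile time. */
#include <sys/types.h>
#include <sys/socket.h>

int main(void)
{
  int flag = MSG_NOSIGNAL;  /* fails to compile where the flag is missing */
  (void)flag;
  return 0;
}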
@ -1855,10 +2049,8 @@ AC_DEFUN([CURL_CHECK_FUNC_CLOCK_GETTIME_MONOTONIC], [
AC_REQUIRE([AC_HEADER_TIME])dnl
AC_CHECK_HEADERS(sys/types.h sys/time.h time.h)
AC_MSG_CHECKING([for monotonic clock_gettime])
#
if test "x$dontwant_rt" == "xno" ; then
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
#ifdef HAVE_SYS_TYPES_H
#include <sys/types.h>
#endif
@ -1872,18 +2064,17 @@ AC_DEFUN([CURL_CHECK_FUNC_CLOCK_GETTIME_MONOTONIC], [
#include <time.h>
#endif
#endif
]],[[
struct timespec ts;
(void)clock_gettime(CLOCK_MONOTONIC, &ts);
]])
],[
AC_MSG_RESULT([yes])
ac_cv_func_clock_gettime="yes"
],[
AC_MSG_RESULT([no])
ac_cv_func_clock_gettime="no"
])
fi
]],[[
struct timespec ts;
(void)clock_gettime(CLOCK_MONOTONIC, &ts);
]])
],[
AC_MSG_RESULT([yes])
ac_cv_func_clock_gettime="yes"
],[
AC_MSG_RESULT([no])
ac_cv_func_clock_gettime="no"
])
dnl Definition of HAVE_CLOCK_GETTIME_MONOTONIC is intentionally postponed
dnl until library linking and run-time checks for clock_gettime succeed.
])
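To illustrate why the define is postponed: the symbol can compile and link (possibly only with -lrt) yet still fail when invoked, so a run-time call along the lines of this hedged sketch is what ultimately decides it:

/* Sketch of the run-time verification the comment above refers to. */
#include <stdio.h>
#include <time.h>

int main(void)
{
  struct timespec ts;
  if(clock_gettime(CLOCK_MONOTONIC, &ts) != 0) {
    printf("monotonic clock_gettime not usable at run time\n");
    return 1;
  }
  printf("monotonic clock: %ld.%09ld\n", (long)ts.tv_sec, (long)ts.tv_nsec);
  return 0;
}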
@ -1954,6 +2145,7 @@ AC_DEFUN([CURL_CHECK_LIBS_CLOCK_GETTIME_MONOTONIC], [
else
LIBS="$curl_cv_gclk_LIBS $curl_cv_save_LIBS"
fi
CURL_LIBS="$CURL_LIBS $curl_cv_gclk_LIBS"
AC_MSG_RESULT([$curl_cv_gclk_LIBS])
ac_cv_func_clock_gettime="yes"
;;
@ -2110,8 +2302,6 @@ AC_DEFUN([CURL_CONFIGURE_CURL_SOCKLEN_T], [
AC_REQUIRE([CURL_INCLUDES_SYS_SOCKET])dnl
AC_REQUIRE([CURL_PREPROCESS_CALLCONV])dnl
#
AC_BEFORE([$0], [CURL_CONFIGURE_PULL_SYS_POLL])dnl
#
AC_MSG_CHECKING([for curl_socklen_t data type])
curl_typeof_curl_socklen_t="unknown"
for arg1 in int SOCKET; do
@ -2220,45 +2410,6 @@ AC_DEFUN([CURL_CONFIGURE_CURL_SOCKLEN_T], [
])
dnl CURL_CONFIGURE_PULL_SYS_POLL
dnl -------------------------------------------------
dnl Find out if system header file sys/poll.h must be included by the
dnl external interface, making appropriate definitions for template file
dnl include/curl/curlbuild.h.in to properly configure and use the library.
dnl
dnl The need for the sys/poll.h inclusion arises mainly to properly
dnl interface AIX systems which define macros 'events' and 'revents'.
AC_DEFUN([CURL_CONFIGURE_PULL_SYS_POLL], [
AC_REQUIRE([CURL_INCLUDES_POLL])dnl
#
tst_poll_events_macro_defined="unknown"
#
AC_COMPILE_IFELSE([
AC_LANG_PROGRAM([[
$curl_includes_poll
]],[[
#if defined(events) || defined(revents)
return 0;
#else
force compilation error
#endif
]])
],[
tst_poll_events_macro_defined="yes"
],[
tst_poll_events_macro_defined="no"
])
#
if test "$tst_poll_events_macro_defined" = "yes"; then
if test "x$ac_cv_header_sys_poll_h" = "xyes"; then
CURL_DEFINE_UNQUOTED([CURL_PULL_SYS_POLL_H])
fi
fi
#
])
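To make the AIX concern concrete: where <sys/poll.h> defines 'events' and 'revents' as macros, any public structure using those member names is rewritten by the preprocessor unless every consumer includes the header the same way. A small hypothetical illustration (struct and field names invented for this sketch):

/* Hypothetical example of the clash CURL_PULL_SYS_POLL_H guards against:
   on systems that #define events/revents, these members would be renamed
   by the preprocessor, so the public header must pull in sys/poll.h too. */
#include <sys/poll.h>

struct transfer_watch {
  int events;
  int revents;
};

int main(void)
{
  struct transfer_watch w;
  w.events = POLLIN;
  w.revents = 0;
  return w.revents;
}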
dnl CURL_CHECK_FUNC_SELECT
dnl -------------------------------------------------
dnl Test if the socket select() function is available,
@ -2459,6 +2610,23 @@ AC_DEFUN([CURL_CHECK_FUNC_SELECT], [
])
# This is only a temporary fix. This macro is here to replace the broken one
# delivered by the automake project (including the 1.9.6 release). As soon as
# they ship a working version we SHOULD remove this work-around.
AC_DEFUN([AM_MISSING_HAS_RUN],
[AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl
test x"${MISSING+set}" = xset || MISSING="\${SHELL} \"$am_aux_dir/missing\""
# Use eval to expand $SHELL
if eval "$MISSING --run true"; then
am_missing_run="$MISSING --run "
else
am_missing_run=
AC_MSG_WARN([`missing' script is too old or missing])
fi
])
dnl CURL_VERIFY_RUNTIMELIBS
dnl -------------------------------------------------
dnl Verify that the shared libs found so far can be used when running
@ -2574,8 +2742,7 @@ AC_DEFUN([CURL_CHECK_CA_BUNDLE], [
AC_MSG_CHECKING([default CA cert bundle/path])
AC_ARG_WITH(ca-bundle,
AC_HELP_STRING([--with-ca-bundle=FILE],
[Path to a file containing CA certificates (example: /etc/ca-bundle.crt)])
AC_HELP_STRING([--with-ca-bundle=FILE], [File name to use as CA bundle])
AC_HELP_STRING([--without-ca-bundle], [Don't use a default CA bundle]),
[
want_ca="$withval"
@ -2585,11 +2752,7 @@ AC_HELP_STRING([--without-ca-bundle], [Don't use a default CA bundle]),
],
[ want_ca="unset" ])
AC_ARG_WITH(ca-path,
AC_HELP_STRING([--with-ca-path=DIRECTORY],
[Path to a directory containing CA certificates stored individually, with \
their filenames in a hash format. This option can be used with OpenSSL, \
GnuTLS and PolarSSL backends. Refer to OpenSSL c_rehash for details. \
(example: /etc/certificates)])
AC_HELP_STRING([--with-ca-path=DIRECTORY], [Directory to use as CA path])
AC_HELP_STRING([--without-ca-path], [Don't use a default CA path]),
[
want_capath="$withval"
@ -2599,122 +2762,74 @@ AC_HELP_STRING([--without-ca-path], [Don't use a default CA path]),
],
[ want_capath="unset"])
ca_warning=" (warning: certs not found)"
capath_warning=" (warning: certs not found)"
check_capath=""
if test "x$want_ca" != "xno" -a "x$want_ca" != "xunset" -a \
"x$want_capath" != "xno" -a "x$want_capath" != "xunset"; then
dnl both given
ca="$want_ca"
capath="$want_capath"
AC_MSG_ERROR([Can't specify both --with-ca-bundle and --with-ca-path.])
elif test "x$want_ca" != "xno" -a "x$want_ca" != "xunset"; then
dnl --with-ca-bundle given
ca="$want_ca"
capath="no"
elif test "x$want_capath" != "xno" -a "x$want_capath" != "xunset"; then
dnl --with-ca-path given
if test "x$OPENSSL_ENABLED" != "x1" -a "x$GNUTLS_ENABLED" != "x1" -a "x$POLARSSL_ENABLED" != "x1"; then
AC_MSG_ERROR([--with-ca-path only works with OpenSSL, GnuTLS or PolarSSL])
if test "x$OPENSSL_ENABLED" != "x1"; then
AC_MSG_ERROR([--with-ca-path only works with openSSL])
fi
capath="$want_capath"
ca="no"
else
dnl neither of --with-ca-* given
dnl first try autodetecting a CA bundle , then a CA path
dnl both autodetections can be skipped by --without-ca-*
ca="no"
capath="no"
if test "x$cross_compiling" != "xyes"; then
dnl NOT cross-compiling and...
dnl neither of the --with-ca-* options are provided
if test "x$want_ca" = "xunset"; then
dnl the path we previously would have installed the curl ca bundle
dnl to, and thus we now check for an already existing cert in that
dnl place in case we find no other
if test "x$prefix" != xNONE; then
cac="${prefix}/share/curl/curl-ca-bundle.crt"
else
cac="$ac_default_prefix/share/curl/curl-ca-bundle.crt"
fi
if test "x$want_ca" = "xunset"; then
dnl the path we previously would have installed the curl ca bundle
dnl to, and thus we now check for an already existing cert in that place
dnl in case we find no other
if test "x$prefix" != xNONE; then
cac="${prefix}/share/curl/curl-ca-bundle.crt"
else
cac="$ac_default_prefix/share/curl/curl-ca-bundle.crt"
fi
for a in /etc/ssl/certs/ca-certificates.crt \
/etc/pki/tls/certs/ca-bundle.crt \
/usr/share/ssl/certs/ca-bundle.crt \
/usr/local/share/certs/ca-root.crt \
/etc/ssl/cert.pem \
"$cac"; do
if test -f "$a"; then
ca="$a"
break
fi
done
fi
if test "x$want_capath" = "xunset" -a "x$ca" = "xno" -a \
"x$OPENSSL_ENABLED" = "x1"; then
check_capath="/etc/ssl/certs/"
fi
else
dnl no option given and cross-compiling
AC_MSG_WARN([skipped the ca-cert path detection when cross-compiling])
for a in /etc/ssl/certs/ca-certificates.crt \
/etc/pki/tls/certs/ca-bundle.crt \
/usr/share/ssl/certs/ca-bundle.crt \
/usr/local/share/certs/ca-root.crt \
/etc/ssl/cert.pem \
"$cac"; do
if test -f "$a"; then
ca="$a"
break
fi
done
fi
if test "x$want_capath" = "xunset" -a "x$ca" = "xno" -a \
"x$OPENSSL_ENABLED" = "x1"; then
for a in /etc/ssl/certs/; do
if test -d "$a" && ls "$a"/[[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]].0 >/dev/null 2>/dev/null; then
capath="$a"
break
fi
done
fi
fi
if test "x$ca" = "xno" || test -f "$ca"; then
ca_warning=""
fi
if test "x$capath" != "xno"; then
check_capath="$capath"
fi
if test ! -z "$check_capath"; then
for a in "$check_capath"; do
if test -d "$a" && ls "$a"/[[0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f][0-9a-f]].0 >/dev/null 2>/dev/null; then
if test "x$capath" = "xno"; then
capath="$a"
fi
capath_warning=""
break
fi
done
fi
if test "x$capath" = "xno"; then
capath_warning=""
fi
if test "x$ca" != "xno"; then
CURL_CA_BUNDLE='"'$ca'"'
AC_DEFINE_UNQUOTED(CURL_CA_BUNDLE, "$ca", [Location of default ca bundle])
AC_SUBST(CURL_CA_BUNDLE)
AC_MSG_RESULT([$ca])
fi
if test "x$capath" != "xno"; then
elif test "x$capath" != "xno"; then
CURL_CA_PATH="\"$capath\""
AC_DEFINE_UNQUOTED(CURL_CA_PATH, "$capath", [Location of default ca path])
AC_MSG_RESULT([$capath (capath)])
fi
if test "x$ca" = "xno" && test "x$capath" = "xno"; then
else
AC_MSG_RESULT([no])
fi
AC_MSG_CHECKING([whether to use builtin CA store of SSL library])
AC_ARG_WITH(ca-fallback,
AC_HELP_STRING([--with-ca-fallback], [Use the built in CA store of the SSL library])
AC_HELP_STRING([--without-ca-fallback], [Don't use the built in CA store of the SSL library]),
[
if test "x$with_ca_fallback" != "xyes" -a "x$with_ca_fallback" != "xno"; then
AC_MSG_ERROR([--with-ca-fallback only allows yes or no as parameter])
fi
],
[ with_ca_fallback="no"])
AC_MSG_RESULT([$with_ca_fallback])
if test "x$with_ca_fallback" = "xyes"; then
if test "x$OPENSSL_ENABLED" != "x1" -a "x$GNUTLS_ENABLED" != "x1"; then
AC_MSG_ERROR([--with-ca-fallback only works with OpenSSL or GnuTLS])
fi
AC_DEFINE_UNQUOTED(CURL_CA_FALLBACK, 1, [define "1" to use built in CA store of SSL library ])
fi
])
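
The capath test above only accepts a directory containing OpenSSL-style hashed certificate names (eight hex digits followed by ".0"). As an illustration of what such a directory looks like, here is a minimal, hypothetical shell sketch; it assumes an OpenSSL installation that provides c_rehash or the newer 'openssl rehash' command, and the /tmp path is only an example:

# build a throwaway capath that would pass the hashed-name test above
mkdir -p /tmp/capath
cp /etc/ssl/certs/*.pem /tmp/capath 2>/dev/null
c_rehash /tmp/capath 2>/dev/null || openssl rehash /tmp/capath
ls /tmp/capath/*.0          # hashed links such as 4042bcee.0
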
@ -2728,7 +2843,7 @@ AC_DEFUN([DO_CURL_OFF_T_CHECK], [
tmp_includes=""
tmp_source=""
tmp_fmt=""
case XC_SH_TR_SH([$1]) in
case AS_TR_SH([$1]) in
int64_t)
tmp_includes="$curl_includes_inttypes"
tmp_source="char f@<:@@:>@ = PRId64;"
@ -2786,7 +2901,7 @@ AC_DEFUN([DO_CURL_OFF_T_SUFFIX_CHECK], [
curl_suffix_curl_off_t="unknown"
curl_suffix_curl_off_tu="unknown"
#
case XC_SH_TR_SH([$1]) in
case AS_TR_SH([$1]) in
long_long | __longlong | __longlong_t)
tst_suffixes="LL::"
;;
@ -2887,6 +3002,7 @@ AC_DEFUN([CURL_CONFIGURE_CURL_OFF_T], [
#
x_LP64_long=""
x_LP32_long=""
x_LP16_long=""
#
if test "$ac_cv_sizeof_long" -eq "8" &&
test "$ac_cv_sizeof_voidp" -ge "8"; then
@ -2894,6 +3010,9 @@ AC_DEFUN([CURL_CONFIGURE_CURL_OFF_T], [
elif test "$ac_cv_sizeof_long" -eq "4" &&
test "$ac_cv_sizeof_voidp" -ge "4"; then
x_LP32_long="long"
elif test "$ac_cv_sizeof_long" -eq "2" &&
test "$ac_cv_sizeof_voidp" -ge "2"; then
x_LP16_long="long"
fi
#
dnl DO_CURL_OFF_T_CHECK results are stored in next 3 vars
@ -2927,6 +3046,17 @@ AC_DEFUN([CURL_CONFIGURE_CURL_OFF_T], [
done
AC_MSG_RESULT([$curl_typeof_curl_off_t])
fi
if test "$curl_typeof_curl_off_t" = "unknown"; then
AC_MSG_CHECKING([for 16-bit curl_off_t data type])
for t2 in \
"$x_LP16_long" \
'int16_t' \
'__int16' \
'int' ; do
DO_CURL_OFF_T_CHECK([$t2], [2])
done
AC_MSG_RESULT([$curl_typeof_curl_off_t])
fi
if test "$curl_typeof_curl_off_t" = "unknown"; then
AC_MSG_ERROR([cannot find data type for curl_off_t.])
fi
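
Outside of configure, the size probing that DO_CURL_OFF_T_CHECK relies on can be approximated with a throwaway test program; this is only an illustration of the idea, not part of the macro itself:

# print the type widths configure reasons about when picking curl_off_t
cat >probe.c <<'EOF'
#include <stdio.h>
int main(void)
{
  printf("long=%u llong=%u ptr=%u\n",
         (unsigned)sizeof(long), (unsigned)sizeof(long long),
         (unsigned)sizeof(void *));
  return 0;
}
EOF
cc probe.c -o probe && ./probe   # e.g. "long=8 llong=8 ptr=8" on an LP64 system
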
@ -2943,7 +3073,7 @@ AC_DEFUN([CURL_CONFIGURE_CURL_OFF_T], [
curl_format_curl_off_tu=`echo "$curl_format_curl_off_tu" | "$SED" 's/D$/U/'`
else
x_pull_headers="no"
case XC_SH_TR_SH([$curl_typeof_curl_off_t]) in
case AS_TR_SH([$curl_typeof_curl_off_t]) in
long_long | __longlong | __longlong_t)
curl_format_curl_off_t="lld"
curl_format_curl_off_tu="llu"
@ -3077,8 +3207,8 @@ AC_DEFUN([CURL_EXPORT_PCDIR], [
dnl CURL_CHECK_PKGCONFIG ($module, [$pcdir])
dnl ------------------------
dnl search for the pkg-config tool. Set the PKGCONFIG variable to hold the
dnl path to it, or 'no' if not found/present.
dnl search for the pkg-config tool (if not cross-compiling). Set the PKGCONFIG
dnl variable to hold the path to it, or 'no' if not found/present.
dnl
dnl If pkg-config is present, check that it has info about the $module or
dnl return "no" anyway!
@ -3087,14 +3217,20 @@ dnl Optionally PKG_CONFIG_LIBDIR may be given as $pcdir.
dnl
AC_DEFUN([CURL_CHECK_PKGCONFIG], [
if test -n "$PKG_CONFIG"; then
PKGCONFIG="$PKG_CONFIG"
else
AC_PATH_TOOL([PKGCONFIG], [pkg-config], [no],
[$PATH:/usr/bin:/usr/local/bin])
PKGCONFIG="no"
if test x$cross_compiling = xyes; then
dnl see if there's a pkg-specific for this host setup
AC_PATH_PROG( PKGCONFIG, ${host}-pkg-config, no,
$PATH:/usr/bin:/usr/local/bin)
fi
if test "x$PKGCONFIG" != "xno"; then
if test x$PKGCONFIG = xno; then
AC_PATH_PROG( PKGCONFIG, pkg-config, no, $PATH:/usr/bin:/usr/local/bin)
fi
if test x$PKGCONFIG != xno; then
AC_MSG_CHECKING([for $1 options with pkg-config])
dnl ask pkg-config about $1
itexists=`CURL_EXPORT_PCDIR([$2]) dnl
@ -3151,48 +3287,3 @@ use vars qw(
1;
_EOF
])
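
The pkg-config lookup shown a few hunks above prefers a host-prefixed tool when cross-compiling. Outside the macro, the same idea looks roughly like this sketch (the target triplet is only an example, not something the macro hard-codes):

# prefer a target-specific pkg-config when cross-compiling, as the macro does
host=arm-linux-gnueabihf                       # example triplet
if command -v "${host}-pkg-config" >/dev/null 2>&1; then
  PKGCONFIG="${host}-pkg-config"
else
  PKGCONFIG=pkg-config
fi
"$PKGCONFIG" --exists libcurl && echo "libcurl found by $PKGCONFIG"
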
dnl CURL_CPP_P
dnl
dnl Check if $cpp -P should be used for extracting define values due to gcc 5
dnl splitting up strings and defines between line outputs. gcc by default
dnl (without -P) will show TEST EINVAL TEST as
dnl
dnl # 13 "conftest.c"
dnl TEST
dnl # 13 "conftest.c" 3 4
dnl 22
dnl # 13 "conftest.c"
dnl TEST
AC_DEFUN([CURL_CPP_P], [
AC_MSG_CHECKING([if cpp -P is needed])
AC_EGREP_CPP([TEST.*TEST], [
#include <errno.h>
TEST EINVAL TEST
], [cpp=no], [cpp=yes])
AC_MSG_RESULT([$cpp])
dnl we need cpp -P so check if it works then
if test "x$cpp" = "xyes"; then
AC_MSG_CHECKING([if cpp -P works])
OLDCPPFLAGS=$CPPFLAGS
CPPFLAGS="$CPPFLAGS -P"
AC_EGREP_CPP([TEST.*TEST], [
#include <errno.h>
TEST EINVAL TEST
], [cpp_p=yes], [cpp_p=no])
AC_MSG_RESULT([$cpp_p])
if test "x$cpp_p" = "xno"; then
AC_MSG_WARN([failed to figure out cpp -P alternative])
# without -P
CPPPFLAGS=$OLDCPPFLAGS
else
# with -P
CPPPFLAGS=$CPPFLAGS
fi
dnl restore CPPFLAGS
CPPFLAGS=$OLDCPPFLAGS
fi
])
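
The preprocessor behaviour the comment above describes is easy to reproduce by hand; a quick, hypothetical demonstration (on Linux, EINVAL happens to be 22):

# show why configure may need to add -P when grepping preprocessor output
cat >conftest.c <<'EOF'
#include <errno.h>
TEST EINVAL TEST
EOF
cc -E conftest.c | tail -n 5     # gcc 5+ may interleave "# 2 ..." line markers
cc -E -P conftest.c | tail -n 1  # prints "TEST 22 TEST" on a single line
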

View File

@ -1,35 +0,0 @@
version: 7.47.0.{build}
environment:
matrix:
- PRJ_GEN: "Visual Studio 11 2012 Win64"
BDIR: msvc2012
PRJ_CFG: Release
OPENSSL: OFF
- PRJ_GEN: "Visual Studio 12 2013 Win64"
BDIR: msvc2013
PRJ_CFG: Release
OPENSSL: OFF
- PRJ_GEN: "Visual Studio 14 2015 Win64"
BDIR: msvc2015
PRJ_CFG: Release
OPENSSL: OFF
- PRJ_GEN: "Visual Studio 11 2012 Win64"
BDIR: msvc2012
PRJ_CFG: Release
OPENSSL: ON
- PRJ_GEN: "Visual Studio 12 2013 Win64"
BDIR: msvc2013
PRJ_CFG: Release
OPENSSL: ON
- PRJ_GEN: "Visual Studio 14 2015 Win64"
BDIR: msvc2015
PRJ_CFG: Release
OPENSSL: ON
build_script:
- mkdir build.%BDIR%
- cd build.%BDIR%
- cmake .. -G"%PRJ_GEN%" -DCMAKE_USE_OPENSSL=%OPENSSL%
- cmake --build . --config %PRJ_CFG% --clean-first

199
buildconf
View File

@ -6,11 +6,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2014, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2010, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -21,24 +21,23 @@
#
###########################################################################
#--------------------------------------------------------------------------
# die prints argument string to stdout and exits this shell script.
#
die(){
echo "buildconf: $@"
exit 1
echo "$@"
exit
}
#--------------------------------------------------------------------------
# findtool works as 'which' but we use a different name to make it more
# obvious we aren't using 'which'! ;-)
# Unlike 'which' does, the current directory is ignored.
#
findtool(){
file="$1"
if { echo "$file" | grep "/" >/dev/null 2>&1; } then
# when file is given with a path check it first
if { echo $file | grep "/" >/dev/null 2>&1; } then
# we only check for the explicit file name if the file is given
# including a slash. Use ./ for current dir. Previously this would
# otherwise always cause findtool to search the local dir first, which
# is wrong.
if test -f "$file"; then
echo "$file"
return
@ -50,7 +49,7 @@ findtool(){
do
IFS=$old_IFS
# echo "checks for $file in $path" >&2
if test "$path" -a "$path" != '.' -a -f "$path/$file"; then
if test -f "$path/$file"; then
echo "$path/$file"
return
fi
@ -81,19 +80,16 @@ removethis(){
# Ensure that buildconf runs from the subdirectory where configure.ac lives
#
if test ! -f configure.ac ||
test ! -f src/tool_main.c ||
test ! -f src/main.c ||
test ! -f lib/urldata.h ||
test ! -f include/curl/curl.h ||
test ! -f m4/curl-functions.m4; then
test ! -f include/curl/curl.h; then
echo "Can not run buildconf from outside of curl's source subdirectory!"
echo "Change to the subdirectory where buildconf is found, and try again."
exit 1
fi
#--------------------------------------------------------------------------
# autoconf 2.57 or newer. Unpatched version 2.67 does not generate a proper
# configure script. Unpatched version 2.68 is simply unusable; we should
# disallow 2.68 usage.
# autoconf 2.57 or newer
#
need_autoconf="2.57"
ac_version=`${AUTOCONF:-autoconf} --version 2>/dev/null|head -n 1| sed -e 's/^[^0-9]*//' -e 's/[a-z]* *$//'`
@ -112,15 +108,7 @@ if test "$1" = "2" -a "$2" -lt "57" || test "$1" -lt "2"; then
exit 1
fi
if test "$1" = "2" -a "$2" -eq "67"; then
echo "buildconf: autoconf version $ac_version (BAD)"
echo " Unpatched version generates broken configure script."
elif test "$1" = "2" -a "$2" -eq "68"; then
echo "buildconf: autoconf version $ac_version (BAD)"
echo " Unpatched version generates unusable configure script."
else
echo "buildconf: autoconf version $ac_version (ok)"
fi
echo "buildconf: autoconf version $ac_version (ok)"
am4te_version=`${AUTOM4TE:-autom4te} --version 2>/dev/null|head -n 1| sed -e 's/autom4te\(.*\)/\1/' -e 's/^[^0-9]*//' -e 's/[a-z]* *$//'`
if test -z "$am4te_version"; then
@ -190,67 +178,72 @@ else
fi
#--------------------------------------------------------------------------
# GNU libtoolize preliminary check
# libtool check
#
want_lt_major=1
want_lt_minor=4
want_lt_patch=2
want_lt_version=1.4.2
LIBTOOL_WANTED_MAJOR=1
LIBTOOL_WANTED_MINOR=4
LIBTOOL_WANTED_PATCH=2
LIBTOOL_WANTED_VERSION=1.4.2
# This approach that tries 'glibtoolize' first is intended for systems that
# have GNU libtool named as 'glibtoolize' and libtoolize not being GNU's.
libtoolize=`findtool glibtoolize 2>/dev/null`
if test ! -x "$libtoolize"; then
libtoolize=`findtool ${LIBTOOLIZE:-libtoolize}`
fi
if test -z "$libtoolize"; then
echo "buildconf: libtoolize not found."
echo " You need GNU libtoolize $want_lt_version or newer installed."
exit 1
# this approach that tries 'glibtool' first is some kind of work-around for
# some BSD-systems I believe that use to provide the GNU libtool named
# glibtool, with 'libtool' being something completely different.
libtool=`findtool glibtool 2>/dev/null`
if test ! -x "$libtool"; then
libtool=`findtool ${LIBTOOL:-libtool}`
fi
lt_pver=`$libtoolize --version 2>/dev/null|head -n 1`
if test -z "$LIBTOOLIZE"; then
# set the LIBTOOLIZE here so that glibtoolize is used if glibtool was found
# $libtool is already the full path
libtoolize="${libtool}ize"
else
libtoolize=`findtool $LIBTOOLIZE`
fi
lt_pver=`$libtool --version 2>/dev/null|head -n 1`
lt_qver=`echo $lt_pver|sed -e "s/([^)]*)//g" -e "s/^[^0-9]*//g"`
lt_version=`echo $lt_qver|sed -e "s/[- ].*//" -e "s/\([a-z]*\)$//"`
if test -z "$lt_version"; then
echo "buildconf: libtoolize not found."
echo " You need GNU libtoolize $want_lt_version or newer installed."
echo "buildconf: libtool not found."
echo " You need libtool version $LIBTOOL_WANTED_VERSION or newer installed"
exit 1
fi
old_IFS=$IFS; IFS='.'; set $lt_version; IFS=$old_IFS
lt_major=$1
lt_minor=$2
lt_patch=$3
lt_status="good"
if test -z "$lt_major"; then
lt_status="bad"
elif test "$lt_major" -gt "$want_lt_major"; then
lt_status="good"
elif test "$lt_major" -lt "$want_lt_major"; then
lt_status="bad"
elif test -z "$lt_minor"; then
lt_status="bad"
elif test "$lt_minor" -gt "$want_lt_minor"; then
lt_status="good"
elif test "$lt_minor" -lt "$want_lt_minor"; then
lt_status="bad"
elif test -z "$lt_patch"; then
lt_status="bad"
elif test "$lt_patch" -gt "$want_lt_patch"; then
lt_status="good"
elif test "$lt_patch" -lt "$want_lt_patch"; then
lt_status="bad"
else
lt_status="good"
if test "$lt_major" = "$LIBTOOL_WANTED_MAJOR"; then
if test "$lt_minor" -lt "$LIBTOOL_WANTED_MINOR"; then
lt_status="bad"
elif test -n "$LIBTOOL_WANTED_PATCH"; then
if test "$lt_minor" -gt "$LIBTOOL_WANTED_MINOR"; then
lt_status="good"
elif test -n "$lt_patch"; then
if test "$lt_patch" -lt "$LIBTOOL_WANTED_PATCH"; then
lt_status="bad"
fi
else
lt_status="bad"
fi
fi
fi
if test "$lt_status" != "good"; then
echo "buildconf: libtoolize version $lt_version found."
echo " You need GNU libtoolize $want_lt_version or newer installed."
if test $lt_status != "good"; then
echo "buildconf: libtool version $lt_version found."
echo " You need libtool version $LIBTOOL_WANTED_VERSION or newer installed"
exit 1
fi
echo "buildconf: libtoolize version $lt_version (ok)"
echo "buildconf: libtool version $lt_version (ok)"
if test -f "$libtoolize"; then
echo "buildconf: libtoolize found"
else
echo "buildconf: libtoolize not found. Weird libtool installation!"
exit 1
fi
#--------------------------------------------------------------------------
# m4 check
@ -273,10 +266,6 @@ fi
# perl check
#
PERL=`findtool ${PERL:-perl}`
if test -z "$PERL"; then
echo "buildconf: perl not found"
exit 1
fi
#--------------------------------------------------------------------------
# Remove files generated on previous buildconf/configure run.
@ -312,14 +301,11 @@ for fname in .deps \
libcurl.pc \
libtool \
libtool.m4 \
libtool.m4.tmp \
ltmain.sh \
ltoptions.m4 \
ltsugar.m4 \
ltversion.m4 \
lt~obsolete.m4 \
missing \
install-sh \
stamp-h1 \
stamp-h2 \
stamp-h3 ; do
@ -331,49 +317,32 @@ done
#
echo "buildconf: running libtoolize"
${libtoolize} --copy --force || die "libtoolize command failed"
$libtoolize --copy --automake --force || die "The libtoolize command failed"
# When using libtool 1.5.X (X < 26) we copy libtool.m4 to our local m4
# subdirectory and this local copy is patched to fix some warnings that
# are triggered when running aclocal and using autoconf 2.62 or later.
if test "$lt_major" = "1" && test "$lt_minor" = "5"; then
if test -z "$lt_patch" || test "$lt_patch" -lt "26"; then
echo "buildconf: copying libtool.m4 to local m4 subdir"
ac_dir=`${ACLOCAL:-aclocal} --print-ac-dir`
if test -f $ac_dir/libtool.m4; then
cp -f $ac_dir/libtool.m4 m4/libtool.m4
else
echo "buildconf: $ac_dir/libtool.m4 not found"
fi
if test -f m4/libtool.m4; then
echo "buildconf: renaming some variables in local m4/libtool.m4"
$PERL -i.tmp -pe \
's/lt_prog_compiler_pic_works/lt_cv_prog_compiler_pic_works/g; \
s/lt_prog_compiler_static_works/lt_cv_prog_compiler_static_works/g;' \
m4/libtool.m4
rm -f m4/libtool.m4.tmp
fi
fi
fi
if test -f m4/libtool.m4; then
echo "buildconf: converting all mv to mv -f in local m4/libtool.m4"
$PERL -i.tmp -pe 's/\bmv +([^-\s])/mv -f $1/g' m4/libtool.m4
rm -f m4/libtool.m4.tmp
if test ! -f m4/curl-functions.m4; then
echo "buildconf: cURL m4 macros not found"
exit 1
fi
echo "buildconf: running aclocal"
${ACLOCAL:-aclocal} -I m4 $ACLOCAL_FLAGS || die "aclocal command failed"
${ACLOCAL:-aclocal} -I m4 $ACLOCAL_FLAGS || die "The aclocal command line failed"
echo "buildconf: converting all mv to mv -f in local aclocal.m4"
$PERL -i.bak -pe 's/\bmv +([^-\s])/mv -f $1/g' aclocal.m4
if test -n "$PERL"; then
echo "buildconf: running aclocal hack to convert all mv to mv -f"
$PERL -i.bak -pe 's/\bmv +([^-\s])/mv -f $1/g' aclocal.m4
else
echo "buildconf: perl not found"
exit 1
fi
echo "buildconf: running autoheader"
${AUTOHEADER:-autoheader} || die "autoheader command failed"
${AUTOHEADER:-autoheader} || die "The autoheader command failed"
echo "buildconf: cp lib/curl_config.h.in src/curl_config.h.in"
cp lib/curl_config.h.in src/curl_config.h.in
echo "buildconf: running autoconf"
${AUTOCONF:-autoconf} || die "autoconf command failed"
${AUTOCONF:-autoconf} || die "The autoconf command failed"
if test -d ares; then
cd ares
@ -383,15 +352,14 @@ if test -d ares; then
fi
echo "buildconf: running automake"
${AUTOMAKE:-automake} --add-missing --copy || die "automake command failed"
${AUTOMAKE:-automake} -a -c || die "The automake command failed"
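
For readers unfamiliar with the autotools bootstrap: the libtoolize/aclocal/autoheader/autoconf/automake sequence that buildconf runs above is roughly what a plain autoreconf invocation would do. A sketch of the equivalence (note that buildconf additionally copies lib/curl_config.h.in to src/, which autoreconf does not do for you):

# approximate stand-alone equivalent of the buildconf tool sequence above
libtoolize --copy --force
aclocal -I m4
autoheader
cp lib/curl_config.h.in src/curl_config.h.in
autoconf
automake --add-missing --copy
# or, on most systems, simply: autoreconf -fi -I m4
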
#--------------------------------------------------------------------------
# GNU libtool complementary check
#
# Depending on the libtool and automake versions being used, config.guess
# might not be installed in the subdirectory until automake has finished.
# So we can not attempt to use it until this very last buildconf stage.
#
if test ! -f ./config.guess; then
echo "buildconf: config.guess not found"
else
@ -435,7 +403,7 @@ else
if test "$lt_status" != "good"; then
need_lt_version="$need_lt_major.$need_lt_minor.$need_lt_patch"
echo "buildconf: libtool version $lt_version found."
echo " $buildhost requires GNU libtool $need_lt_version or newer installed."
echo " $buildhost requires libtool $need_lt_version or newer installed."
rm -f configure
exit 1
fi
@ -443,7 +411,8 @@ else
fi
#--------------------------------------------------------------------------
# Finished successfully.
# Finished succesfully.
#
echo "buildconf: OK"
exit 0

View File

@ -1,350 +1,38 @@
@echo off
rem ***************************************************************************
rem * _ _ ____ _
rem * Project ___| | | | _ \| |
rem * / __| | | | |_) | |
rem * | (__| |_| | _ <| |___
rem * \___|\___/|_| \_\_____|
rem *
rem * Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
rem *
rem * This software is licensed as described in the file COPYING, which
rem * you should have received as part of this distribution. The terms
rem * are also available at https://curl.haxx.se/docs/copyright.html.
rem *
rem * You may opt to use, copy, modify, merge, publish, distribute and/or sell
rem * copies of the Software, and permit persons to whom the Software is
rem * furnished to do so, under the terms of the COPYING file.
rem *
rem * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
rem * KIND, either express or implied.
rem *
rem ***************************************************************************
REM
REM
REM This batch file must be used to set up a git tree to build on
REM systems where there is no autotools support (i.e. Microsoft).
REM
REM This file is not included nor needed for curl's release
REM archives, neither for curl's daily snapshot archives.
rem NOTES
rem
rem This batch file must be used to set up a git tree to build on systems where
rem there is no autotools support (i.e. DOS and Windows).
rem
if exist GIT-INFO goto start_doing
ECHO ERROR: This file shall only be used with a curl git tree checkout.
goto end_all
:start_doing
:begin
rem Set our variables
if "%OS%" == "Windows_NT" setlocal
set MODE=GENERATE
REM create hugehelp.c
if not exist src\hugehelp.c.cvs goto end_hugehelp_c
copy /Y src\hugehelp.c.cvs src\hugehelp.c
:end_hugehelp_c
rem Switch to this batch file's directory
cd /d "%~0\.." 1>NUL 2>&1
REM create Makefile
if not exist Makefile.dist goto end_makefile
copy /Y Makefile.dist Makefile
:end_makefile
rem Check we are running from a curl git repository
if not exist GIT-INFO goto norepo
REM create curlbuild.h
if not exist include\curl\curlbuild.h.dist goto end_curlbuild_h
copy /Y include\curl\curlbuild.h.dist include\curl\curlbuild.h
:end_curlbuild_h
rem Detect programs. HAVE_<PROGNAME>
rem When not found the variable is set undefined. The undefined pattern
rem allows for statements like "if not defined HAVE_PERL (command)"
groff --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_GROFF=) else (set HAVE_GROFF=Y)
nroff --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_NROFF=) else (set HAVE_NROFF=Y)
perl --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_PERL=) else (set HAVE_PERL=Y)
gzip --version <NUL 1>NUL 2>&1
if errorlevel 1 (set HAVE_GZIP=) else (set HAVE_GZIP=Y)
REM setup c-ares git tree
if not exist ares\buildconf.bat goto end_c_ares
cd ares
call buildconf.bat
cd ..
:end_c_ares
:parseArgs
if "%~1" == "" goto start
:end_all
if /i "%~1" == "-clean" (
set MODE=CLEAN
) else if /i "%~1" == "-?" (
goto syntax
) else if /i "%~1" == "-h" (
goto syntax
) else if /i "%~1" == "-help" (
goto syntax
) else (
goto unknown
)
shift & goto parseArgs
:start
if "%MODE%" == "GENERATE" (
echo.
echo Generating prerequisite files
call :generate
if errorlevel 4 goto nogencurlbuild
if errorlevel 3 goto nogenhugehelp
if errorlevel 2 goto nogenmakefile
if errorlevel 1 goto warning
) else (
echo.
echo Removing prerequisite files
call :clean
if errorlevel 3 goto nocleancurlbuild
if errorlevel 2 goto nocleanhugehelp
if errorlevel 1 goto nocleanmakefile
)
goto success
rem Main generate function.
rem
rem Returns:
rem
rem 0 - success
rem 1 - success with simplified tool_hugehelp.c
rem 2 - failed to generate Makefile
rem 3 - failed to generate tool_hugehelp.c
rem 4 - failed to generate curlbuild.h
rem
:generate
if "%OS%" == "Windows_NT" setlocal
set BASIC_HUGEHELP=0
rem Create Makefile
echo * %CD%\Makefile
if exist Makefile.dist (
copy /Y Makefile.dist Makefile 1>NUL 2>&1
if errorlevel 1 (
if "%OS%" == "Windows_NT" endlocal
exit /B 2
)
)
rem Create tool_hugehelp.c
echo * %CD%\src\tool_hugehelp.c
call :genHugeHelp
if errorlevel 2 (
if "%OS%" == "Windows_NT" endlocal
exit /B 3
)
if errorlevel 1 (
set BASIC_HUGEHELP=1
)
cmd /c exit 0
rem Create curlbuild.h
echo * %CD%\include\curl\curlbuild.h
if exist include\curl\curlbuild.h.dist (
copy /Y include\curl\curlbuild.h.dist include\curl\curlbuild.h 1>NUL 2>&1
if errorlevel 1 (
if "%OS%" == "Windows_NT" endlocal
exit /B 4
)
)
rem Setup c-ares git tree
if exist ares\buildconf.bat (
echo.
echo Configuring c-ares build environment
cd ares
call buildconf.bat
cd ..
)
if "%BASIC_HUGEHELP%" == "1" (
if "%OS%" == "Windows_NT" endlocal
exit /B 1
)
if "%OS%" == "Windows_NT" endlocal
exit /B 0
rem Main clean function.
rem
rem Returns:
rem
rem 0 - success
rem 1 - failed to clean Makefile
rem 2 - failed to clean tool_hugehelp.c
rem 3 - failed to clean curlbuild.h
rem
:clean
rem Remove Makefile
echo * %CD%\Makefile
if exist Makefile (
del Makefile 2>NUL
if exist Makefile (
exit /B 1
)
)
rem Remove tool_hugehelp.c
echo * %CD%\src\tool_hugehelp.c
if exist src\tool_hugehelp.c (
del src\tool_hugehelp.c 2>NUL
if exist src\tool_hugehelp.c (
exit /B 2
)
)
rem Remove curlbuild.h
echo * %CD%\include\curl\curlbuild.h
if exist include\curl\curlbuild.h (
del include\curl\curlbuild.h 2>NUL
if exist include\curl\curlbuild.h (
exit /B 3
)
)
exit /B
rem Function to generate src\tool_hugehelp.c
rem
rem Returns:
rem
rem 0 - full tool_hugehelp.c generated
rem 1 - simplified tool_hugehelp.c
rem 2 - failure
rem
:genHugeHelp
if "%OS%" == "Windows_NT" setlocal
set LC_ALL=C
set ROFFCMD=
set BASIC=1
if defined HAVE_PERL (
if defined HAVE_GROFF (
set ROFFCMD=groff -mtty-char -Tascii -P-c -man
) else if defined HAVE_NROFF (
set ROFFCMD=nroff -c -Tascii -man
)
)
if defined ROFFCMD (
echo #include "tool_setup.h"> src\tool_hugehelp.c
echo #include "tool_hugehelp.h">> src\tool_hugehelp.c
if defined HAVE_GZIP (
echo #ifndef HAVE_LIBZ>> src\tool_hugehelp.c
)
%ROFFCMD% docs\curl.1 2>NUL | perl src\mkhelp.pl docs\MANUAL >> src\tool_hugehelp.c
if defined HAVE_GZIP (
echo #else>> src\tool_hugehelp.c
%ROFFCMD% docs\curl.1 2>NUL | perl src\mkhelp.pl -c docs\MANUAL >> src\tool_hugehelp.c
echo #endif /^* HAVE_LIBZ ^*/>> src\tool_hugehelp.c
)
set BASIC=0
) else (
if exist src\tool_hugehelp.c.cvs (
copy /Y src\tool_hugehelp.c.cvs src\tool_hugehelp.c 1>NUL 2>&1
) else (
echo #include "tool_setup.h"> src\tool_hugehelp.c
echo #include "tool_hugehelp.hd">> src\tool_hugehelp.c
echo.>> src\tool_hugehelp.c
echo void hugehelp(void^)>> src\tool_hugehelp.c
echo {>> src\tool_hugehelp.c
echo #ifdef USE_MANUAL>> src\tool_hugehelp.c
echo fputs("Built-in manual not included\n", stdout^);>> src\tool_hugehelp.c
echo #endif>> src\tool_hugehelp.c
echo }>> src\tool_hugehelp.c
)
)
findstr "/C:void hugehelp(void)" src\tool_hugehelp.c 1>NUL 2>&1
if errorlevel 1 (
if "%OS%" == "Windows_NT" endlocal
exit /B 2
)
if "%BASIC%" == "1" (
if "%OS%" == "Windows_NT" endlocal
exit /B 1
)
if "%OS%" == "Windows_NT" endlocal
exit /B 0
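
The same tool_hugehelp.c that the batch routine above builds can be produced in a unix-like shell with essentially the same pipeline; a sketch using only the commands already referenced above (run from the source tree root, gzip branch omitted):

# rough shell equivalent of the genHugeHelp routine above
{
  echo '#include "tool_setup.h"'
  echo '#include "tool_hugehelp.h"'
  groff -mtty-char -Tascii -P-c -man docs/curl.1 2>/dev/null \
    | perl src/mkhelp.pl docs/MANUAL
} > src/tool_hugehelp.c
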
rem Function to clean-up local variables under DOS, Windows 3.x and
rem Windows 9x as setlocal isn't available until Windows NT
rem
:dosCleanup
set MODE=
set HAVE_GROFF=
set HAVE_NROFF=
set HAVE_PERL=
set HAVE_GZIP=
set BASIC_HUGEHELP=
set LC_ALL
set ROFFCMD=
set BASIC=
exit /B
:syntax
rem Display the help
echo.
echo Usage: buildconf [-clean]
echo.
echo -clean - Removes the files
goto error
:unknown
echo.
echo Error: Unknown argument '%1'
goto error
:norepo
echo.
echo Error: This batch file should only be used with a curl git repository
goto error
:nogenmakefile
echo.
echo Error: Unable to generate Makefile
goto error
:nogenhugehelp
echo.
echo Error: Unable to generate src\tool_hugehelp.c
goto error
:nogencurlbuild
echo.
echo Error: Unable to generate include\curl\curlbuild.h
goto error
:nocleanmakefile
echo.
echo Error: Unable to clean Makefile
goto error
:nocleanhugehelp
echo.
echo Error: Unable to clean src\tool_hugehelp.c
goto error
:nocleancurlbuild
echo.
echo Error: Unable to clean include\curl\curlbuild.h
goto error
:warning
echo.
echo Warning: The curl manual could not be integrated in the source. This means when
echo you build curl the manual will not be available (curl --man^). Integration of
echo the manual is not required and a summary of the options will still be available
echo (curl --help^). To integrate the manual your PATH is required to have
echo groff/nroff, perl and optionally gzip for compression.
goto success
:error
if "%OS%" == "Windows_NT" (
endlocal
) else (
call :dosCleanup
)
exit /B 1
:success
if "%OS%" == "Windows_NT" (
endlocal
) else (
call :dosCleanup
)
exit /B 0

File diff suppressed because it is too large

View File

@ -6,11 +6,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 2001 - 2012, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 2001 - 2010, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -24,7 +24,6 @@
prefix=@prefix@
exec_prefix=@exec_prefix@
includedir=@includedir@
cppflag_curl_staticlib=@CPPFLAG_CURL_STATICLIB@
usage()
{
@ -71,106 +70,96 @@ while test $# -gt 0; do
;;
--ca)
echo @CURL_CA_BUNDLE@
;;
echo "@CURL_CA_BUNDLE@"
;;
--cc)
echo "@CC@"
;;
echo "@CC@"
;;
--prefix)
echo "$prefix"
;;
echo "$prefix"
;;
--feature|--features)
for feature in @SUPPORT_FEATURES@ ""; do
test -n "$feature" && echo "$feature"
done
;;
;;
--protocols)
for protocol in @SUPPORT_PROTOCOLS@; do
echo "$protocol"
done
;;
;;
--version)
echo libcurl @CURLVERSION@
exit 0
;;
echo libcurl @VERSION@
exit 0
;;
--checkfor)
checkfor=$2
cmajor=`echo $checkfor | cut -d. -f1`
cminor=`echo $checkfor | cut -d. -f2`
# when extracting the patch part we strip off everything after a
# dash as that's used for things like version 1.2.3-CVS
cpatch=`echo $checkfor | cut -d. -f3 | cut -d- -f1`
# dash as that's used for things like version 1.2.3-CVS
cpatch=`echo $checkfor | cut -d. -f3 | cut -d- -f1`
checknum=`echo "$cmajor*256*256 + $cminor*256 + ${cpatch:-0}" | bc`
numuppercase=`echo @VERSIONNUM@ | tr 'a-f' 'A-F'`
nownum=`echo "obase=10; ibase=16; $numuppercase" | bc`
if test "$nownum" -ge "$checknum"; then
# silent success
exit 0
else
echo "requested version $checkfor is newer than existing @CURLVERSION@"
exit 1
fi
;;
if test "$nownum" -ge "$checknum"; then
# silent success
exit 0
else
echo "requested version $checkfor is newer than existing @VERSION@"
exit 1
fi
;;
--vernum)
echo @VERSIONNUM@
exit 0
;;
echo @VERSIONNUM@
exit 0
;;
--help)
usage 0
;;
usage 0
;;
--cflags)
if test "X$cppflag_curl_staticlib" = "X-DCURL_STATICLIB"; then
CPPFLAG_CURL_STATICLIB="-DCURL_STATICLIB "
if test "X@includedir@" = "X/usr/include"; then
echo ""
else
CPPFLAG_CURL_STATICLIB=""
echo "-I@includedir@"
fi
if test "X@includedir@" = "X/usr/include"; then
echo "$CPPFLAG_CURL_STATICLIB"
else
echo "${CPPFLAG_CURL_STATICLIB}-I@includedir@"
fi
;;
;;
--libs)
if test "X@libdir@" != "X/usr/lib" -a "X@libdir@" != "X/usr/lib64"; then
CURLLIBDIR="-L@libdir@ "
else
CURLLIBDIR=""
fi
if test "X@REQUIRE_LIB_DEPS@" = "Xyes"; then
echo ${CURLLIBDIR}-lcurl @LIBCURL_LIBS@
else
echo ${CURLLIBDIR}-lcurl
fi
;;
if test "X@libdir@" != "X/usr/lib" -a "X@libdir@" != "X/usr/lib64"; then
CURLLIBDIR="-L@libdir@ "
else
CURLLIBDIR=""
fi
if test "X@REQUIRE_LIB_DEPS@" = "Xyes"; then
echo ${CURLLIBDIR}-lcurl @LDFLAGS@ @LIBCURL_LIBS@ @LIBS@
else
echo ${CURLLIBDIR}-lcurl @LDFLAGS@ @LIBS@
fi
;;
--static-libs)
if test "X@ENABLE_STATIC@" != "Xno" ; then
echo @libdir@/libcurl.@libext@ @LDFLAGS@ @LIBCURL_LIBS@
else
echo "curl was built with static libraries disabled" >&2
exit 1
fi
;;
echo @libdir@/libcurl.@libext@ @LDFLAGS@ @LIBCURL_LIBS@ @LIBS@
;;
--configure)
echo @CONFIGURE_OPTIONS@
;;
echo @CONFIGURE_OPTIONS@
;;
*)
echo "unknown option: $1"
usage 1
;;
usage 1
;;
esac
shift
done
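
The --checkfor branch above packs a version string into a single number (major*65536 + minor*256 + patch) and compares it with the installed @VERSIONNUM@. A worked, hypothetical usage example (exact output depends on the installed libcurl):

# 7.19.0 -> 7*65536 + 19*256 + 0 = 463616 = 0x071300
curl-config --version            # e.g. "libcurl 7.21.5"
curl-config --vernum             # e.g. "071505", the hex form of 7.21.5
curl-config --checkfor 7.19.0 && echo "installed libcurl is new enough"
curl-config --checkfor 7.99.0    # fails and reports the requested version is newer
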

50
curl-style.el Normal file
View File

@ -0,0 +1,50 @@
;;;; Emacs Lisp help for writing curl code. ;;;;
;;; The curl hacker's C conventions.
;;; See the sample.emacs file on how this file can be made to take
;;; effect automatically when editing curl source files.
(defconst curl-c-style
'((c-basic-offset . 2)
(c-comment-only-line-offset . 0)
(c-hanging-braces-alist . ((substatement-open before after)))
(c-offsets-alist . ((topmost-intro . 0)
(topmost-intro-cont . 0)
(substatement . +)
(substatement-open . 0)
(statement-case-intro . +)
(statement-case-open . 0)
(case-label . 0)
))
)
"Curl C Programming Style")
(defun curl-code-cleanup ()
"no docs"
(interactive)
(untabify (point-min) (point-max))
(delete-trailing-whitespace)
)
;; Customizations for all of c-mode, c++-mode, and objc-mode
(defun curl-c-mode-common-hook ()
"Curl C mode hook"
;; add curl style and set it for the current buffer
(c-add-style "curl" curl-c-style t)
(setq tab-width 8
indent-tabs-mode nil ; Use spaces. Not tabs.
comment-column 40
c-font-lock-extra-types (append '("bool" "CURL" "CURLcode" "ssize_t" "size_t" "curl_socklen_t" "fd_set" "time_t" "curl_off_t" "curl_socket_t" "in_addr_t" "CURLSHcode" "CURLMcode" "Curl_addrinfo"))
)
;; keybindings for C, C++, and Objective-C. We can put these in
;; c-mode-base-map because of inheritance ...
(define-key c-mode-base-map "\M-q" 'c-fill-paragraph)
(define-key c-mode-base-map "\M-m" 'curl-code-cleanup)
(setq c-recognize-knr-p nil)
;;; (add-hook 'write-file-hooks 'delete-trailing-whitespace t)
(setq show-trailing-whitespace t)
)
;; Set this is in your .emacs if you want to use the c-mode-hook as
;; defined here right out of the box.
; (add-hook 'c-mode-common-hook 'curl-c-mode-common-hook)

View File

@ -6,14 +6,14 @@
libcurl bindings
Creative people have written bindings or interfaces for various environments
and programming languages. Using one of these allows you to take advantage of
curl powers from within your favourite language or system.
Creative people have written bindings or interfaces for various environments
and programming languages. Using one of these allows you to take advantage of
curl powers from within your favourite language or system.
This is a list of all known interfaces as of this writing.
This is a list of all known interfaces as of this writing.
The bindings listed below are not part of the curl/libcurl distribution
archives, but must be downloaded and installed separately.
The bindings listed below are not part of the curl/libcurl distribution
archives, but must be downloaded and installed separately.
Ada95
@ -27,7 +27,7 @@ Basic
C
libcurl is a C library in itself!
https://curl.haxx.se/libcurl/
http://curl.haxx.se/libcurl/
C++
@ -41,16 +41,13 @@ Ch
Cocoa
BBHTTP: written by Bruno de Carvalho
https://github.com/brunodecarvalho/BBHTTP
curlhandle: Written by Dan Wood
Written by Dan Wood
http://curlhandle.sourceforge.net/
D
Written by Kenneth Bogert
http://dlang.org/library/std/net/curl.html
http://curl.haxx.se/libcurl/d/
Dylan
@ -58,9 +55,8 @@ Dylan
http://dylanlibs.sourceforge.net/
Eiffel
Written by Eiffel Software
https://room.eiffel.com/library/curl
http://curl.haxx.se/libcurl/eiffel/
Euphoria
@ -78,23 +74,13 @@ Ferite
Gambas
http://gambas.sourceforge.net/
http://gambas.sourceforge.net
glib/GTK+
Written by Richard Atterer
http://atterer.net/glibcurl/
Guile:
Written by Michael L. Gran
http://www.lonelycactus.com/guile-curl.html
Harbour
Written by Viktor Szakáts
https://github.com/vszakats/harbour-core/tree/master/contrib/hbcurl
Haskell
Written by Galois, Inc
@ -102,12 +88,8 @@ Haskell
Java
https://github.com/pjlegato/curl-java
Julia
Written by Paul Howe
https://github.com/forio/Curl.jl
Maintained by [blank]
http://curl.haxx.se/libcurl/java/
Lisp
@ -119,7 +101,7 @@ Lua
luacurl by Alexander Marinov
http://luacurl.luaforge.net/
Lua-cURL by Jürgen Hötzel
Lua-cURL by Jürgen Hötzel
http://luaforge.net/projects/lua-curl/
Mono
@ -130,12 +112,7 @@ Mono
.NET
libcurl-net by Jeffrey Phillips
https://sourceforge.net/projects/libcurl-net/
node.js
node-libcurl by Jonathan Cardoso Machado
https://github.com/JCMais/node-libcurl
http://sourceforge.net/projects/libcurl-net/
Object-Pascal
@ -145,7 +122,7 @@ Object-Pascal
O'Caml
Written by Lars Nilsson
https://sourceforge.net/projects/ocurl/
http://sourceforge.net/projects/ocurl/
Pascal
@ -154,13 +131,13 @@ Pascal
Perl
Maintained by Cris Bailiff and Bálint Szilakszi
https://github.com/szbalint/WWW--Curl
Maintained by Cris Bailiff
http://curl.haxx.se/libcurl/perl/
PHP
Written by Sterling Hughes
https://php.net/curl
http://curl.haxx.se/libcurl/php/
PostgreSQL
@ -174,7 +151,8 @@ Python
R
http://cran.r-project.org/package=curl
RCurl by Duncan Temple Lang
http://www.omegahat.org/RCurl/
Rexx
@ -184,7 +162,7 @@ Rexx
RPG
Support for ILE/RPG on OS/400 is included in source distribution
https://curl.haxx.se/libcurl/
http://curl.haxx.se/libcurl/
See packages/OS400/README.OS400 and packages/OS400/curl.inc.in
Ruby
@ -195,15 +173,10 @@ Ruby
ruby-curl-multi - written by Kristjan Petursson and Keith Rarick
http://curl-multi.rubyforge.org/
Rust
curl-rust - by Carl Lerche
https://github.com/carllerche/curl-rust
Scheme
Bigloo binding by Kirill Lisovsky
http://www.metapaper.net/lisovsky/web/curl/
http://curl.haxx.se/libcurl/scheme/
S-Lang
@ -227,13 +200,13 @@ SPL
Tcl
Tclcurl by Andrés García
http://mirror.yellow5.com/tclcurl/
Tclcurl by Andrés García
http://personal1.iddeo.es/andresgarci/tclcurl/english/docs.html
Visual Basic
libcurl-vb by Jeffrey Phillips
https://sourceforge.net/projects/libcurl-vb/
http://sourceforge.net/projects/libcurl-vb/
Visual Foxpro
@ -253,8 +226,3 @@ XBLite
Written by David Szafranski
http://perso.wanadoo.fr/xblite/libraries.html
Xojo
Written by Andrew Lambert
https://github.com/charonn0/RB-libcURL

View File

@ -6,54 +6,39 @@
BUGS
1. Bugs
1.1 There are still bugs
1.2 Where to report
1.3 What to report
1.4 libcurl problems
1.5 Who will fix the problems
1.6 How to get a stack trace
1.7 Bugs in libcurl bindings
==============================================================================
1.1 There are still bugs
Curl and libcurl have grown substantially since the beginning. At the time
of writing (January 2013), there are about 83,000 lines of source code, and
by the time you read this it has probably grown even more.
of writing (July 2007), there are about 47000 lines of source code, and by
the time you read this it has probably grown even more.
Of course there are lots of bugs left. And lots of misfeatures.
To help us make curl the stable and solid product we want it to be, we need
bug reports and bug fixes.
1.2 Where to report
WHERE TO REPORT
If you can't fix a bug yourself and submit a fix for it, try to report an as
detailed report as possible to a curl mailing list to allow one of us to
have a go at a solution. You can optionally also post your bug/problem at
curl's bug tracking system over at
have a go at a solution. You should also post your bug/problem at curl's bug
tracking system over at
https://github.com/curl/curl/issues
http://sourceforge.net/bugs/?group_id=976
Please read the rest of this document below first before doing that!
(but please read the sections below first before doing that)
If you feel you need to ask around first, find a suitable mailing list and
post there. The lists are available on https://curl.haxx.se/mail/
post there. The lists are available on http://curl.haxx.se/mail/
1.3 What to report
WHAT TO REPORT
When reporting a bug, you should include all information that will help us
understand what's wrong, what you expected to happen and how to repeat the
bad behavior. You therefore need to tell us:
- your operating system's name and version number
- your operating system's name and version number (uname -a under a unix
is fine)
- what version of curl you're using (curl -V is fine)
- versions of the used libraries that libcurl is built to use
- what URL you were working with (if possible), at least which protocol
and anything and everything else you think matters. Tell us what you
@ -74,48 +59,7 @@ BUGS
The address and how to subscribe to the mailing lists are detailed in the
MANUAL file.
1.4 libcurl problems
First, post all libcurl problems on the curl-library mailing list.
When you've written your own application with libcurl to perform transfers,
it is even more important to be specific and detailed when reporting bugs.
Tell us the libcurl version and your operating system. Tell us the name and
version of all relevant sub-components like for example the SSL library
you're using and what name resolving your libcurl uses. If you use SFTP or
SCP, the libssh2 version is relevant etc.
Showing us a real source code example repeating your problem is the best way
to get our attention and it will greatly increase our chances to understand
your problem and to work on a fix (if we agree it truly is a problem).
Lots of problems that appear to be libcurl problems are actually just abuses
of the libcurl API or other malfunctions in your applications. It is advised
that you run your problematic program using a memory debug tool like
valgrind or similar before you post memory-related or "crashing" problems to
us.
1.5 Who will fix the problems
If the problems or bugs you describe are considered to be bugs, we want to
have the problems fixed.
There are no developers in the curl project that are paid to work on bugs.
All developers that take on reported bugs do this on a voluntary basis. We
do it out of an ambition to keep curl and libcurl excellent products and out
of pride.
But please do not assume that you can just lump over something to us and it
will then magically be fixed after some given time. Most often we need
feedback and help to understand what you've experienced and how to repeat a
problem. Then we may only be able to assist YOU to debug the problem and to
track down the proper fix.
We get reports from many people every month and each report can take a
considerable amount of time to really go to the bottom with.
1.6 How to get a stack trace
HOW TO GET A STACK TRACE
First, you must make sure that you compile all sources with -g and that you
don't 'strip' the final executable. Try to avoid optimizing the code as
@ -135,12 +79,3 @@ BUGS
crashed. Include the stack trace with your detailed bug report. It'll help a
lot.
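
As a concrete, hypothetical illustration of the advice above (the URL is just an example; gdb's batch options are used so the whole run can be captured):

# build with debug info, no stripping, little optimization
./configure --enable-debug CFLAGS="-g -O0"
make
# reproduce the crash under gdb and capture the stack trace for the report
gdb -batch -ex run -ex bt --args src/curl https://example.com/
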
1.7 Bugs in libcurl bindings
There will of course pop up bugs in libcurl bindings. You should then
primarily approach the team that works on that particular binding and see
what you can do to help them fix the problem.
If you suspect that the problem exists in the underlying libcurl, then
please convert your program over to plain C and follow the steps outlined
above.

View File

@ -1,32 +0,0 @@
Contributor Code of Conduct
===========================
As contributors and maintainers of this project, we pledge to respect all
people who contribute through reporting issues, posting feature requests,
updating documentation, submitting pull requests or patches, and other
activities.
We are committed to making participation in this project a harassment-free
experience for everyone, regardless of level of experience, gender, gender
identity and expression, sexual orientation, disability, personal appearance,
body size, race, ethnicity, age, or religion.
Examples of unacceptable behavior by participants include the use of sexual
language or imagery, derogatory comments or personal attacks, trolling, public
or private harassment, insults, or other unprofessional conduct.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct. Project maintainers who do not
follow the Code of Conduct may be removed from the project team.
This code of conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by opening an issue or contacting one or more of the project
maintainers.
This Code of Conduct is adapted from the [Contributor
Covenant](http://contributor-covenant.org), version 1.1.0, available at
[http://contributor-covenant.org/version/1/1/0/](http://contributor-covenant.org/version/1/1/0/)

View File

@ -1,185 +0,0 @@
# cURL C code style
Source code that has a common style is easier to read than code that uses
different styles in different places. It helps make the code feel like one
single code base. Easy-to-read is a very important property of code and makes
it easier to review when new things are added; it also helps with debugging
code when developers are trying to figure out why things go wrong. A unified
style is more important than individual contributors having their own personal
tastes satisfied.
Our C code has a few style rules. Most of them are verified and upheld by the
lib/checksrc.pl script, invoked with `make checksrc` or even by default by the
build system when built after `./configure --enable-debug` has been used.
It is normally not a problem for anyone to follow the guidelines, as you just
need to copy the style already used in the source code and there are no
particularly unusual rules in our set of rules.
We also work hard on writing code that is warning-free on all the major
platforms and in general on as many platforms as possible. Code that obviously
will cause warnings will not be accepted as-is.
## Naming
Try using a non-confusing naming scheme for your new functions and variable
names. It doesn't necessarily have to mean that you should use the same as in
other places of the code, just that the names should be logical,
understandable and be named according to what they're used for. File-local
functions should be made static. We like lower case names.
See the INTERNALS document on how we name non-exported library-global symbols.
## Indenting
We use only spaces for indentation, never TABs. We use two spaces for each new
open brace.
if(something_is_true) {
while(second_statement == fine) {
moo();
}
}
## Comments
Since we write C89 code, `//` comments are not allowed. They weren't
introduced in the C standard until C99. We use only `/*` and `*/` comments:
/* this is a comment */
## Long lines
Source code in curl may never be wider than 80 columns and there are two
reasons for maintaining this even in the modern era of very large and high
resolution screens:
1. Narrower columns are easier to read than very wide ones. There's a reason
newspapers have used columns for decades or centuries.
2. Narrower columns allow developers to easier show multiple pieces of code
next to each other in different windows. I often have two or three source
code windows next to each other on the same screen - as well as multiple
terminal and debugging windows.
## Braces
In if/while/do/for expressions, we write the open brace on the same line as
the keyword and we then set the closing brace on the same indentation level as
the initial keyword. Like this:
if(age < 40) {
/* clearly a youngster */
}
When we write functions however, the opening brace should be in the first
column of the first line:
int main(int argc, char **argv)
{
return 1;
}
## 'else' on the following line
When adding an `else` clause to a conditional expression using braces, we add
it on a new line after the closing brace. Like this:
if(age < 40) {
/* clearly a youngster */
}
else {
/* probably grumpy */
}
## No space before parentheses
When writing expressions using if/while/do/for, there shall be no space
between the keyword and the open parenthesis. Like this:
while(1) {
/* loop forever */
}
## Use boolean conditions
Rather than test a conditional value such as a bool against TRUE or FALSE, a
pointer against NULL or != NULL and an int against zero or not zero in
if/while conditions we prefer:
result = do_something();
if(!result) {
/* something went wrong */
return result;
}
## No assignments in conditions
To increase readability and reduce complexity of conditionals, we avoid
assigning variables within if/while conditions. We frown upon this style:
if((ptr = malloc(100)) == NULL)
return NULL;
and instead we encourage the above version to be spelled out more clearly:
ptr = malloc(100);
if(!ptr)
return NULL;
## New block on a new line
We never write multiple statements on the same source line, even for very
short if() conditions.
if(a)
return TRUE;
else if(b)
return FALSE;
and NEVER:
if(a) return TRUE;
else if(b) return FALSE;
## Space around operators
Please use spaces on both sides of operators in C expressions. Postfix `(),
[], ->, ., ++, --` and unary `+, -, !, ~, &` operators are excluded; they
should have no space.
Examples:
bla = func();
who = name[0];
age += 1;
true = !false;
size += -2 + 3 * (a + b);
ptr->member = a++;
struct.field = b--;
ptr = &address;
contents = *pointer;
complement = ~bits;
## Platform dependent code
Use `#ifdef HAVE_FEATURE` to do conditional code. We avoid checking for
particular operating systems or hardware in the #ifdef lines. The HAVE_FEATURE
shall be generated by the configure script for unix-like systems and they are
hard-coded in the config-[system].h files for the others.
We also encourage use of macros/functions that possibly are empty or defined
to constants when libcurl is built without that feature, to make the code
seamless. Like this style where the `magic()` function works differently
depending on a build-time conditional:
#ifdef HAVE_MAGIC
void magic(int a)
{
return a + 2;
}
#else
#define magic(x) 1
#endif
int content = magic(3);

View File

@ -15,13 +15,18 @@
1.2 License
1.3 What To Read
2. Write a good patch
2.1 Follow code style
2.2 Non-clobbering All Over
2.3 Write Separate Patches
2.4 Patch Against Recent Sources
2.5 Document
2.6 Test Cases
2. cURL Coding Standards
2.1 Naming
2.2 Indenting
2.3 Commenting
2.4 Line Lengths
2.5 General Style
2.6 Non-clobbering All Over
2.7 Platform Dependent Code
2.8 Write Separate Patches
2.9 Patch Against Recent Sources
2.10 Document
2.11 Test Cases
3. Pushing Out Your Changes
3.1 Write Access to git Repository
@ -29,7 +34,6 @@
3.3 How To Make a Patch without git
3.4 How to get your changes into the main sources
3.5 Write good commit messages
3.6 About pull requests
==============================================================================
@ -37,20 +41,16 @@
1.1 Join the Community
Skip over to https://curl.haxx.se/mail/ and join the appropriate mailing
Skip over to http://curl.haxx.se/mail/ and join the appropriate mailing
list(s). Read up on details before you post questions. Read this file before
you start sending patches! We prefer patches and discussions being held on
the mailing list(s), not sent to individuals.
Before posting to one of the curl mailing lists, please read up on the mailing
list etiquette: https://curl.haxx.se/mail/etiquette.html
list etiquette: http://curl.haxx.se/mail/etiquette.html
We also hang out on IRC in #curl on irc.freenode.net
If you're at all interested in the code side of things, consider clicking
'watch' on the curl repo at github to get notified on pull requests and new
issues posted there.
1.2. License
When contributing with code, you agree to put your changes and new code under
@ -77,20 +77,53 @@
1.3 What To Read
Source code, the man pages, the INTERNALS document, TODO, KNOWN_BUGS and the
most recent changes in the git log. Just lurking on the curl-library mailing
list is gonna give you a lot of insights on what's going on right now. Asking
there is a good idea too.
Source code, the man pages, the INTERNALS document, TODO, KNOWN_BUGS, the
most recent CHANGES. Just lurking on the libcurl mailing list is gonna give
you a lot of insights on what's going on right now. Asking there is a good
idea too.
2. Write a good patch
2. cURL Coding Standards
2.1 Follow code style
2.1 Naming
When writing C code, follow the CODE_STYLE already established in the
project. Consistent style makes code easier to read and mistakes less likely
to happen.
Try using a non-confusing naming scheme for your new functions and variable
names. It doesn't necessarily have to mean that you should use the same as in
other places of the code, just that the names should be logical,
understandable and be named according to what they're used for. File-local
functions should be made static. We like lower case names.
2.2 Non-clobbering All Over
See the INTERNALS document on how we name non-exported library-global
symbols.
2.2 Indenting
Please try using the same indenting levels and bracing method as all the
other code already does. It makes the source code a lot easier to follow if
all of it is written using the same style. We don't ask you to like it, we
just ask you to follow the tradition! ;-) This mainly means: 2-level indents,
using spaces only (no tabs) and having the opening brace ({) on the same line
as the if() or while().
Also note that we use if() and while() with no space before the parenthesis.
2.3 Commenting
Comment your source code extensively using C comments (/* comment */), DO NOT
use C++ comments (// this style). Commented code is quality code and makes
future modifications much easier. Uncommented code risks having to be completely
replaced when someone wants to extend things, since other persons' source
code can get quite hard to read.
2.4 Line Lengths
We write source lines shorter than 80 columns.
2.5 General Style
Keep your functions small. If they're small you avoid a lot of mistakes and
you don't accidentally mix up variables etc.
2.6 Non-clobbering All Over
When you write new functionality or fix bugs, it is important that you don't
fiddle all over the source files and functions. Remember that it is likely
@ -99,7 +132,14 @@
functionality, try writing it in a new source file. If you fix bugs, try to
fix one bug at a time and send them as separate patches.
2.3 Write Separate Patches
2.7 Platform Dependent Code
Use #ifdef HAVE_FEATURE to do conditional code. We avoid checking for
particular operating systems or hardware in the #ifdef lines. The
HAVE_FEATURE shall be generated by the configure script for unix-like systems
and they are hard-coded in the config-[system].h files for the others.
2.8 Write Separate Patches
It is annoying when you get a huge patch from someone that is said to fix 511
odd problems, but discussions and opinions don't agree with 510 of them - or
@ -110,17 +150,14 @@
description exactly what they correct so that all patches can be selectively
applied by the maintainer or other interested parties.
Also, separate patches enable bisecting much better when we track problems in
the future.
2.4 Patch Against Recent Sources
2.9 Patch Against Recent Sources
Please try to get the latest available sources to make your patches
against. It makes the life of the developers so much easier. The very best is
if you get the most up-to-date sources from the git repository, but the
latest release archive is quite OK as well!
2.5 Document
2.10 Document
Writing docs is dead boring and one of the big problems with many open source
projects. Someone's gotta do it. It makes it a lot easier if you submit a
@ -131,7 +168,7 @@
ASCII files. All HTML files on the web site and in the release archives are
generated from the nroff/ASCII versions.
2.6 Test Cases
2.11 Test Cases
Since the introduction of the test suite, we can quickly verify that the main
features are working as they're supposed to. To maintain this situation and
@ -140,10 +177,6 @@
test case that verifies that it works as documented. If every submitter also
posts a few test cases, it won't end up as a heavy burden on a single person!
If you don't have test cases or perhaps you have done something that is very
hard to write tests for, do explain exactly how you have otherwise tested and
verified your changes.
3. Pushing Out Your Changes
3.1 Write Access to git Repository
@ -158,7 +191,7 @@
You need to first checkout the repository:
git clone https://github.com/curl/curl.git
git clone git://github.com/bagder/curl.git
You then proceed and edit all the files you like and you commit them to your
local repository:
@ -178,7 +211,7 @@
commit.
Now send those patches off to the curl-library list. You can of course opt to
do that with the 'git send-email' command.
do that with the 'get send-email' command.
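
Spelled out, the workflow described above looks roughly like this sketch (branch name, commit message and recipient are only examples; check https://curl.haxx.se/mail/ for the current list address):

git clone https://github.com/curl/curl.git
cd curl
git checkout -b my-fix
# edit, then commit your change with a good message
git commit -a -m "lib: fix a hypothetical problem"
git format-patch origin/master          # writes 0001-*.patch files
git send-email --to=curl-library@cool.haxx.se 0001-*.patch
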
3.3 How To Make a Patch without git
@ -200,8 +233,8 @@
For unix-like operating systems:
https://savannah.gnu.org/projects/patch/
https://www.gnu.org/software/diffutils/
http://www.gnu.org/software/patch/patch.html
http://www.gnu.org/directory/diffutils.html
For Windows:
@ -237,25 +270,9 @@
[full description, no wider than 72 columns that describe as much as
possible as to why this change is made, and possibly what things
it fixes and everything else that is related]
[Bug: link to source of the report or more related discussion]
[Reported-by: John Doe - credit the reporter]
[whatever-else-by: credit all helpers, finders, doers]
---- stop ----
Don't forget to use commit --author="" if you commit someone else's work,
and make sure that you have your own user and email setup correctly in git
before you commit
3.6 About pull requests
With git (and especially github) it is easy and tempting to send a pull
request to the curl project to have changes merged this way instead of
mailing patches to the curl-library mailing list.
We used to dislike this but we're trying to change that and accept that this
is a frictionless way for people to contribute to the project. We now welcome
pull requests!
We will continue to avoid using github's merge tools to make the history
linear and to make sure commits follow our style guidelines.

176
docs/DISTRO-DILEMMA Normal file
View File

@ -0,0 +1,176 @@
Date: February 11, 2007
Author: Daniel Stenberg <daniel@haxx.se>
URL: http://curl.haxx.se/legal/distro-dilemma.html
Condition
This document is written to describe the situation as it is right now.
libcurl 7.16.1 is currently the latest version available. Things may of
course change in the future.
This document reflects my view and understanding of these things. Please tell
me where and how you think I'm wrong, and I'll try to correct my mistakes.
Background
The Free Software Foundation has deemed the Original BSD license[1] to be
"incompatible"[2] with GPL[3]. I'd rather say it is the other way around, but
the point is the same: if you distribute a binary version of a GPL program,
it MUST NOT be linked with any Original BSD-licensed parts or libraries.
Doing so will violate the GPL license. For a long time, very many GPL
licensed programs have avoided this license mess by adding an exception[8] to
their license. And many others have just closed their eyes to this problem.
libcurl is MIT-style[4] licensed - how on earth did this dilemma fall onto
our plates?
libcurl is only a little library. libcurl can be built to use OpenSSL for its
SSL/TLS capabilities. OpenSSL is basically Original BSD licensed[5].
If libcurl built to use OpenSSL is used by a GPL-licensed application and you
decide to distribute a binary version of it (Linux distros - for example -
tend to), you have a clash. GPL vs Original BSD.
This dilemma is not libcurl-specific nor is it specific to any particular
Linux distro. (This article mentions and refers to Debian several times, but
only because Debian seems to be the only Linux distro to have faced this
issue yet since no other distro is shipping libcurl built with two SSL
libraries.)
Part of the Operating System
This would not be a problem if the used lib would be considered part of the
underlying operating system, as then the GPL license has an exception
clause[6] that allows applications to use such libs without having to be
allowed to distribute it or its sources. Possibly some distros will claim
that OpenSSL is part of their operating system.
Debian does however not take this stance and has officially(?) claimed that
OpenSSL is not a required part of the Debian operating system
Some people claim that this paragraph cannot be exploited this way by a Linux
distro, but I am not a lawyer and that is a discussion left outside of this
document.
GnuTLS
Since May 2005 libcurl can get built to use GnuTLS instead of OpenSSL. GnuTLS
is an LGPL[7] licensed library that offers a matching set of features as
OpenSSL does. Now, you can build and distribute a TLS/SSL capable libcurl
without including any Original BSD licensed code.
I believe Debian is the first (only?) distro that provides libcurl/GnuTLS
packages.
yassl
libcurl can also get built to use yassl for the TLS/SSL layer. yassl is a
GPL[3] licensed library.
GnuTLS vs OpenSSL vs yassl
While these three libraries offer similar features, they are not equal.
libcurl does not (yet) offer a standardized stable ABI if you decide to
switch from using libcurl-openssl to libcurl-gnutls or vice versa. The GnuTLS
and yassl support is very recent in libcurl and it has not been tested nor
used very extensively, while the OpenSSL equivalent code has been used and
thus matured since 1999.
GnuTLS
- LGPL licensed
- supports SRP
- lacks SSLv2 support
- lacks MD2 support (used by at least some CA certs)
- lacks the crypto functions libcurl uses for NTLM
OpenSSL
- Original BSD licensed
- lacks SRP
- supports SSLv2
- older and more widely used
- provides crypto functions libcurl uses for NTLM
- libcurl can do non-blocking connects with it in 7.15.4 and later
yassl
- GPL licensed
- much untested and unproven in the real world by (lib)curl users so we don't
know a lot about restrictions or benefits from using this
The Better License, Original BSD, GPL or LGPL?
It isn't obvious or without debate to any objective interested party that
any of these licenses is the "better" or even the "preferred" one in a
generic situation.
Instead, I think we should accept the fact that the SSL/TLS libraries and
their different licenses will fit different applications and their authors
differently depending on the applications' licenses and their general usage
pattern (considering how GPL and LGPL libraries for example can be burdensome
for embedded systems usage).
In Debian land, there seems to be a common opinion that LGPL is "maximally
compatible" with apps while Original BSD is not. Like this:
http://lists.debian.org/debian-devel/2005/09/msg01417.html
More SSL Libraries
In libcurl, there's no stopping us here. There are more Open Source/Free
SSL/TLS libraries out there and we would very much like to support them as
well, to offer application authors an even wider scope of choice.
Application Angle of this Problem
libcurl is built to use one SSL/TLS library. It uses a single fixed name (by
default) on the built/created lib file, and applications are built/linked to
use that single lib. Replacing one libcurl instance with another one that
uses the other SSL/TLS library might break one or more applications (due to
ABI differences and/or different feature set). You want your application to
use the libcurl it was built for.
Project cURL Angle of this Problem
We distribute libcurl and everyone may build libcurl with either library at
their choice. This problem is not directly a problem of ours. It merely
affects users - GPL application authors only - of our lib as it comes
included and delivered on some distros.
libcurl has different ABI when built with different SSL/TLS libraries due to
these reasons:
1. No one has worked on fixing this. The mutex/lock callbacks should be set
with a generic libcurl function that should use the proper underlying
functions.
2. The CURLOPT_SSL_CTX_FUNCTION option is not possible to "emulate" on GnuTLS
but simply requires OpenSSL.
3. There might be some other subtle differences just because nobody has yet
tried to make a fixed ABI like this.
Distro Angle of this Problem
To my knowledge there is only one distro that ships libcurl built with either
OpenSSL or GnuTLS.
Debian Linux is now (since mid September 2005) providing two different
libcurl packages, one for libcurl built with OpenSSL and one built with
GnuTLS. They use different .so names and can thus both be installed in a
single system simultaneously. This has been said to be a transitional
arrangement, not intended to be kept in the long run.
Footnotes
[1] = http://www.xfree86.org/3.3.6/COPYRIGHT2.html#6
[2] = http://www.fsf.org/licensing/essays/bsd.html
[3] = http://www.fsf.org/licensing/licenses/gpl.html
[4] = http://curl.haxx.se/docs/copyright.html
[5] = http://www.openssl.org/source/license.html
[6] = http://www.fsf.org/licensing/licenses/gpl.html end of section 3
[7] = http://www.fsf.org/licensing/licenses/lgpl.html
[8] = http://en.wikipedia.org/wiki/OpenSSL_exception
Feedback/Updates provided by
Eric Cooper
515
docs/FAQ
View File
@ -1,3 +1,4 @@
Updated: March 8, 2011 (http://curl.haxx.se/docs/faq.html)
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
@ -21,7 +22,6 @@ FAQ
1.12 I have a problem who can I chat with?
1.13 curl's ECCN number?
1.14 How do I submit my patch?
1.15 How do I port libcurl to my OS?
2. Install Related Problems
2.1 configure doesn't find OpenSSL even when it is installed
@ -30,14 +30,13 @@ FAQ
2.2 Does curl work/build with other SSL libraries?
2.3 Where can I find a copy of LIBEAY32.DLL?
2.4 Does curl support SOCKS (RFC 1928) ?
2.5 Install libcurl for both 32bit and 64bit?
3. Usage Problems
3.1 curl: (1) SSL is disabled, https: not supported
3.2 How do I tell curl to resume a transfer?
3.3 Why doesn't my posting using -F work?
3.4 How do I tell curl to run custom FTP commands?
3.5 How can I disable the Accept: */* header?
3.5 How can I disable the Pragma: nocache header?
3.6 Does curl support ASP, XML, XHTML or HTML version Y?
3.7 Can I use curl to delete/rename a file through FTP?
3.8 How do I tell curl to follow HTTP redirects?
@ -54,7 +53,6 @@ FAQ
3.19 How do I get HTTP from a host using a specific IP address?
3.20 How to SFTP from my user's home directory?
3.21 Protocol xxx not supported or disabled in libcurl
3.22 curl -X gives me HTTP problems
4. Running Problems
4.1 Problems connecting to SSL servers.
@ -82,8 +80,6 @@ FAQ
4.17 Non-functional connect timeouts on Windows
4.18 file:// URLs containing drive letters (Windows, NetWare)
4.19 Why doesn't cURL return an error when the network cable is unplugged?
4.20 curl doesn't return error for HTTP non-200 responses!
4.21 Why is there a HTTP/1.1 in my HTTP/2 request?
5. libcurl Issues
5.1 Is libcurl thread-safe?
@ -101,9 +97,6 @@ FAQ
5.13 How do I stop an ongoing transfer?
5.14 Using C++ non-static functions for callbacks?
5.15 How do I get an FTP directory listing?
5.16 I want a different time-out!
5.17 Can I write a server with libcurl?
5.18 Does libcurl use threads?
6. License Issues
6.1 I have a GPL program, can I use the libcurl library?
@ -137,15 +130,15 @@ FAQ
A free and easy-to-use client-side URL transfer library, supporting DICT,
FILE, FTP, FTPS, GOPHER, HTTP, HTTPS, IMAP, IMAPS, LDAP, LDAPS, POP3,
POP3S, RTMP, RTSP, SCP, SFTP, SMB, SMBS, SMTP, SMTPS, TELNET and TFTP.
POP3S, RTMP, RTSP, SCP, SFTP, SMTP, SMTPS, TELNET and TFTP.
libcurl supports HTTPS certificates, HTTP POST, HTTP PUT, FTP uploading,
Kerberos, SPNEGO, HTTP form based upload, proxies, cookies, user+password
kerberos, HTTP form based upload, proxies, cookies, user+password
authentication, file transfer resume, http proxy tunneling and more!
libcurl is highly portable, it builds and works identically on numerous
platforms, including Solaris, NetBSD, FreeBSD, OpenBSD, Darwin, HP-UX,
IRIX, AIX, Tru64, Linux, UnixWare, HURD, Windows, Amiga, OS/2, BeOS, Mac
platforms, including Solaris, NetBSD, FreeBSD, OpenBSD, Darwin, HPUX,
IRIX, AIX, Tru64, Linux, UnixWare, HURD, Windows, Amiga, OS/2, BeOs, Mac
OS X, Ultrix, QNX, OpenVMS, RISC OS, Novell NetWare, DOS, Symbian, OSF,
Android, Minix, IBM TPF and more...
@ -159,10 +152,7 @@ FAQ
Since curl uses libcurl, curl supports the same wide range of common
Internet protocols that libcurl does.
We pronounce curl with an initial k sound. It rhymes with words like girl
and earl. This is a short WAV file to help you:
http://media.merriam-webster.com/soundc11/c/curl0001.wav
We pronounce curl and cURL with an initial k sound: [kurl].
There are numerous sub-projects and related projects that also use the word
curl in the project names in various combinations, but you should take
@ -210,27 +200,27 @@ FAQ
better. We do however believe in a few rules when it comes to the future of
curl:
Curl -- the command line tool -- is to remain a non-graphical command line
tool. If you want GUIs or fancy scripting capabilities, you should look for
another tool that uses libcurl.
* Curl -- the command line tool -- is to remain a non-graphical command line
tool. If you want GUIs or fancy scripting capabilities, you should look
for another tool that uses libcurl.
We do not add things to curl that other small and available tools already do
very fine at the side. Curl's output is fine to pipe into another program or
redirect to another file for the next program to interpret.
* We do not add things to curl that other small and available tools already
do very fine at the side. Curl's output is fine to pipe into another
program or redirect to another file for the next program to interpret.
We focus on protocol related issues and improvements. If you wanna do more
magic with the supported protocols than curl currently does, chances are big
we will agree. If you wanna add more protocols, we may very well agree.
* We focus on protocol related issues and improvements. If you wanna do more
magic with the supported protocols than curl currently does, chances are
big we will agree. If you wanna add more protocols, we may very well
agree.
If you want someone else to make all the work while you wait for us to
implement it for you, that is not a very friendly attitude. We spend a
considerable time already on maintaining and developing curl. In order to
get more out of us, you should consider trading in some of your time and
efforts in return. Simply go to the GitHub repo which resides at
https://github.com/curl/curl, fork the project, and create pull requests
with your proposed changes.
* If you want someone else to make all the work while you wait for us to
implement it for you, that is not a very friendly attitude. We spend a
considerable time already on maintaining and developing curl. In order to
get more out of us, you should consider trading in some of your time and
efforts in return.
If you write the code, chances are bigger that it will get into curl faster.
* If you write the code, chances are bigger that it will get into curl
faster.
1.5 Who makes curl?
@ -247,17 +237,16 @@ FAQ
1.6 What do you get for making curl?
Project cURL is entirely free and open. No person gets paid for developing
curl on full time. We do this voluntarily, mostly on spare time.
Occasionally companies pay individual developers to work on curl, but that's
up to each company and developer. It is not controlled by nor supervised in
any way by the project.
(lib)curl on full or even part time. We do this voluntarily on our spare
time. Occasionally companies pay individual developers to work on curl, but
that's up to each company and developer. It is not controlled by nor
supervised in any way by the project.
We still get help from companies. Haxx provides web site, bandwidth, mailing
lists etc, sourceforge.net hosts project services we take advantage from,
like the bug tracker, and GitHub hosts the primary git repository at
https://github.com/curl/curl. Also again, some companies have sponsored
certain parts of the development in the past and I hope some will continue to
do so in the future.
lists etc and sourceforge.net hosts project services we take advantage from,
like the bug tracker. Also again, some companies have sponsored certain
parts of the development in the past and I hope some will continue to do so
in the future.
If you want to support our project, consider a donation or a banner-program
or even better: by helping us coding, documenting, testing etc.
@ -272,7 +261,7 @@ FAQ
Our project name curl has been in effective use since 1998. We were not the
first computer related project to use the name "curl" and do not claim any
rights to the name.
first-hand rights to the name.
We recognize that we will be living in parallel with curl.com and wish them
every success.
@ -282,7 +271,7 @@ FAQ
Please do not mail any single individual unless you really need to. Keep
curl-related questions on a suitable mailing list. All available mailing
lists are listed in the MANUAL document and online at
https://curl.haxx.se/mail/
http://curl.haxx.se/mail/
Keeping curl-related questions and discussions on mailing lists allows
others to join in and help, to share their ideas, contribute their
@ -303,7 +292,7 @@ FAQ
your curl-related problems.
We list available alternatives on the curl web site:
https://curl.haxx.se/support.html
http://curl.haxx.se/support.html
1.10 How many are using curl?
@ -317,17 +306,49 @@ FAQ
We don't know how many users that downloaded or installed curl and then
never use it.
In May 2012 Daniel did a counting game and came up with a number that may
be completely wrong or somewhat accurate. Over 500 million!
Some facts to use as input to the math:
See https://daniel.haxx.se/blog/2012/05/16/300m-users/
curl packages are downloaded from the curl.haxx.se and mirrors over a
million times per year. curl is installed by default with most Linux
distributions. curl is installed by default with Mac OS X. curl and libcurl
as used by numerous applications that include libcurl binaries in their
distribution packages (like Adobe Acrobat Reader and Google Earth).
More than a hundred known named companies use curl in commercial
environments and products and more than a hundred known named open source
projects depend on (lib)curl.
In a poll on the curl web site mid-2005, more than 50% of the 300+ answers
estimated a user base of one million users or more.
In March 2005, the "Linux Counter project" estimated a total Linux user base
of some 29 millions, while Netcraft detected some 4 million "active" Linux
based web servers. A guess is that a fair amount of these Linux
installations have curl installed.
The Debian project maintains statistics on packages installed by people
who have voluntarily run their package counting application. In mid-2010,
libcurl3 was installed on over 55000 such systems (62% of reporting systems)
and was one of the 320 most popular installed packages (out of about 107000
possible packages).
All this taken together, there is no doubt that there are millions of
(lib)curl users.
http://curl.haxx.se/docs/companies.html
http://curl.haxx.se/docs/programs.html
http://curl.haxx.se/libcurl/using/apps.html
http://counter.li.org/estimates.php
http://news.netcraft.com/archives/2005/03/14/fedora_makes_rapid_progress.html
http://qa.debian.org/popcon.php?package=curl
1.11 Why don't you update ca-bundle.crt
The ca cert bundle that used to shipped with curl was very outdated and must
be replaced with an up-to-date version by anyone who wants to verify
peers. It is no longer provided by curl. The last curl release ever that
shipped a ca cert bundle was curl 7.18.0.
The ca-bundle.crt file that used to be bundled with curl was very outdated
(it being last modified year 2000 should tell) and must be replaced with a
much more modern and up-to-date version by anyone who wants to verify peers
anyway. It is no longer provided, the last curl release that shipped it was
curl 7.18.0.
In the cURL project we've decided not to attempt to keep this file updated
(or even present anymore) since deciding what to add to a ca cert bundle is
@ -342,7 +363,7 @@ FAQ
If you want the most recent collection of ca certs that Mozilla Firefox
uses, we recommend that you extract the collection yourself from Mozilla
Firefox (by running 'make ca-bundle'), or by using our online service set up
for this purpose: https://curl.haxx.se/docs/caextract.html
for this purpose: http://curl.haxx.se/docs/caextract.html
1.12 I have a problem who can I chat with?
@ -356,15 +377,13 @@ FAQ
cryptography. When doing so, the Export Control Classification Number (ECCN)
is used to identify the level of export control etc.
Apache Software Foundation gives a good explanation of ECCNs at
https://www.apache.org/dev/crypto.html
ASF gives a good explanation at http://www.apache.org/dev/crypto.html
We believe curl's number might be ECCN 5D002, another possibility is
5D992. It seems necessary to write them (the authority that administers ECCN
numbers), asking to confirm.
5D992. It seems necessary to write them, asking to confirm.
Comprehensible explanations of the meaning of such numbers and how to obtain
them (resp.) are here
Comprehensible explanations of the meaning of such numbers and how to
obtain them (resp.) are here
http://www.bis.doc.gov/licensing/exportingbasics.htm
http://www.bis.doc.gov/licensing/do_i_needaneccn.html
@ -387,19 +406,6 @@ FAQ
Lots more details are found in the CONTRIBUTE and INTERNALS docs.
1.15 How do I port libcurl to my OS?
Here's a rough step-by-step:
1. copy a suitable lib/config-*.h file as a start to lib/config-[youros].h
2. edit lib/config-[youros].h to match your OS and setup
3. edit lib/curl_setup.h to include config-[youros].h when your OS is
detected by the preprocessor, in the style others already exist
4. compile lib/*.c and make them into a library
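For step 3, a minimal sketch of such a conditional include (the macro and
file names here are made up for illustration only):
#ifdef __MYOS__
#include "config-myos.h"
#endif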
2. Install Related Problems
@ -442,55 +448,24 @@ FAQ
2.2 Does curl work/build with other SSL libraries?
Curl has been written to use a generic SSL function layer internally, and
that SSL functionality can then be provided by one out of many different SSL
backends.
curl can be built to use one of the following SSL alternatives: OpenSSL,
GnuTLS, yassl, NSS, PolarSSL, axTLS, Secure Transport (native iOS/OS X),
WinSSL (native Windows) or GSKit (native IBM i). They all have their pros
and cons, and we try to maintain a comparison of them here:
https://curl.haxx.se/docs/ssl-compared.html
Curl has been written to use OpenSSL, GnuTLS, yassl, NSS, PolarSSL, axTLS or
qssl, although there should not be many problems using a different
library. If anyone does "port" curl to use a different SSL library, we are
of course very interested in getting the patch!
2.3 Where can I find a copy of LIBEAY32.DLL?
That is an OpenSSL binary built for Windows.
Curl can be built with OpenSSL to do the SSL stuff. The LIBEAY32.DLL is then
what curl needs on a windows machine to do https:// etc. Check out the curl
web site to find accurate and up-to-date pointers to recent OpenSSL DLLs and
other binary packages.
Curl uses OpenSSL to do the SSL stuff. The LIBEAY32.DLL is what curl needs
on a windows machine to do https://. Check out the curl web site to find
accurate and up-to-date pointers to recent OpenSSL DLLs and other binary
packages.
2.4 Does curl support SOCKS (RFC 1928) ?
Yes, SOCKS 4 and 5 are supported.
2.5 Install libcurl for both 32bit and 64bit?
In curl's configure procedure one of the regular include files gets created
with platform specific information. The file 'curl/curlbuild.h' in the
installed libcurl file tree is therefore somewhat tied to that particular
platform.
To allow applications to get built for either 32bit or 64bit you need to
install libcurl headers for both setups and unfortunately curl doesn't do
this automatically.
A commonly used procedure is this:
$ ./configure [32bit platform]
$ mv curl/curlbuild.h curl/curlbuild-32bit.h
$ ./configure [64bit platform]
$ mv curl/curlbuild.h curl/curlbuild-64bit.h
Then you make a toplevel curl/curlbuild.h replacement that only does this:
#ifdef IS_32BIT
#include "curlbuild-32bit.h"
#else
#include "curlbuild-64bit.h"
#endif
3. Usage problems
@ -516,14 +491,15 @@ FAQ
3.2 How do I tell curl to resume a transfer?
Curl supports resumed transfers both ways on both FTP and HTTP.
Try the -C option.
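For example, to continue an earlier, interrupted download and let curl
figure out the resume offset by itself (URL made up):
curl -C - -O http://example.com/bigfile.zip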
3.3 Why doesn't my posting using -F work?
You can't simply use -F or -d at your choice. The web server that will
receive your post expects one of the formats. If the form you're trying to
submit uses the type 'multipart/form-data', then and only then you must use
the -F type. In all the most common cases, you should use -d which then
receive your post assumes one of the formats. If the form you're trying to
"fake" sets the type to 'multipart/form-data', then and only then you must
use the -F type. In all the most common cases, you should use -d which then
causes a posting with the type 'application/x-www-form-urlencoded'.
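As a rough illustration (URLs and field names made up), the first command
posts urlencoded data with -d while the second makes a multipart formpost
with -F:
curl -d "name=daniel&tool=curl" http://example.com/form.cgi
curl -F "upload=@photo.jpg" http://example.com/upload.cgi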
This is described in some detail in the MANUAL and TheArtOfHttpScripting
@ -537,23 +513,22 @@ FAQ
You can tell curl to perform optional commands both before and/or after a
file transfer. Study the -Q/--quote option.
Since curl is used for file transfers, you don't normally use curl to
perform FTP commands without transferring anything. Therefore you must
always specify a URL to transfer to/from even when doing custom FTP
commands, or use -I which implies the "no body" option sent to libcurl.
Since curl is used for file transfers, you don't use curl to just perform
FTP commands without transferring anything. Therefore you must always specify
a URL to transfer to/from even when doing custom FTP commands.
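For example, this (made-up) command asks the server to delete a file with
DELE before the directory listing transfer is made:
curl -Q "DELE oldfile.txt" ftp://ftp.example.com/subdir/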
3.5 How can I disable the Accept: */* header?
3.5 How can I disable the Pragma: nocache header?
You can change all internally generated headers by adding a replacement with
the -H/--header option. By adding a header with empty contents you safely
disable that one. Use -H "Accept:" to disable that specific header.
disable that one. Use -H "Pragma:" to disable that specific header.
3.6 Does curl support ASP, XML, XHTML or HTML version Y?
To curl, all contents are alike. It doesn't matter how the page was
generated. It may be ASP, PHP, Perl, shell-script, SSI or plain HTML
files. There's no difference to curl and it doesn't even know what kind of
language that generated the page.
generated. It may be ASP, PHP, Perl, shell-script, SSI or plain
HTML-files. There's no difference to curl and it doesn't even know what kind
of language that generated the page.
See also item 3.14 regarding javascript.
@ -588,13 +563,7 @@ FAQ
Find out more about which languages that support curl directly, and how to
install and use them, in the libcurl section of the curl web site:
https://curl.haxx.se/libcurl/
All the various bindings to libcurl are made by other projects and people,
outside of the cURL project. The cURL project itself only produces libcurl
with its plain C API. If you don't find anywhere else to ask you can ask
about bindings on the curl-library list too, but be prepared that people on
that list may not know anything about bindings.
http://curl.haxx.se/libcurl/
In October 2009, there were interfaces available for the following
languages: Ada95, Basic, C, C++, Ch, Cocoa, D, Dylan, Eiffel, Euphoria,
@ -670,15 +639,15 @@ FAQ
Some workarounds usually suggested to overcome this Javascript dependency:
Depending on the Javascript complexity, write up a script that translates it
to another language and execute that.
- Depending on the Javascript complexity, write up a script that
translates it to another language and execute that.
Read the Javascript code and rewrite the same logic in another language.
- Read the Javascript code and rewrite the same logic in another language.
Implement a Javascript interpreter, people have successfully used the
Mozilla Javascript engine in the past.
- Implement a Javascript interpreter, people have successfully used the
Mozilla Javascript engine in the past.
Ask your admins to stop this, for a static proxy setup or similar.
- Ask your admins to stop this, for a static proxy setup or similar.
3.15 Can I do recursive fetches with curl?
@ -694,38 +663,34 @@ FAQ
There are three different kinds of "certificates" to keep track of when we
talk about using SSL-based protocols (HTTPS or FTPS) using curl or libcurl.
CLIENT CERTIFICATE
- Client certificate. The server you communicate may require that you can
provide this in order to prove that you actually are who you claim to be.
If the server doesn't require this, you don't need a client certificate.
The server you communicate may require that you can provide this in order to
prove that you actually are who you claim to be. If the server doesn't
require this, you don't need a client certificate.
A client certificate is always used together with a private key, and the
private key has a pass phrase that protects it.
A client certificate is always used together with a private key, and the
private key has a pass phrase that protects it.
- Server certificate. The server you communicate with has a server
certificate. You can and should verify this certificate to make sure that
you are truly talking to the real server and not a server impersonating
it.
SERVER CERTIFICATE
- Certificate Authority certificate ("CA cert"). You often have several CA
certs in a CA cert bundle that can be used to verify a server certificate
that was signed by one of the authorities in the bundle. curl does not
come with a CA cert bundle but most curl installs provide one. You can
also override the default.
The server you communicate with has a server certificate. You can and should
verify this certificate to make sure that you are truly talking to the real
server and not a server impersonating it.
CERTIFICATE AUTHORITY CERTIFICATE ("CA cert")
You often have several CA certs in a CA cert bundle that can be used to
verify a server certificate that was signed by one of the authorities in the
bundle. curl does not come with a CA cert bundle but most curl installs
provide one. You can also override the default.
The server certificate verification process is made by using a Certificate
Authority certificate ("CA cert") that was used to sign the server
certificate. Server certificate verification is enabled by default in curl
and libcurl and is often the reason for problems as explained in FAQ entry
4.12 and the SSLCERTS document
(https://curl.haxx.se/docs/sslcerts.html). Server certificates that are
"self-signed" or otherwise signed by a CA that you do not have a CA cert
for, cannot be verified. If the verification during a connect fails, you are
refused access. You then need to explicitly disable the verification to
connect to the server.
The server certificate verification process is made by using a Certificate
Authority certificate ("CA cert") that was used to sign the server
certificate. Server certificate verification is enabled by default in curl
and libcurl and is often the reason for problems as explained in FAQ entry
4.12 and the SSLCERTS document
(http://curl.haxx.se/docs/sslcerts.html). Server certificates that are
"self-signed" or otherwise signed by a CA that you do not have a CA cert
for, cannot be verified. If the verification during a connect fails, you
are refused access. You then need to explicitly disable the verification
to connect to the server.
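For example (file name and URL made up), the first command verifies the
server against a particular CA cert bundle, while the second disables the
verification entirely:
curl --cacert my-ca-bundle.crt https://example.com/
curl -k https://example.com/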
3.17 How do I list the root dir of an FTP server?
@ -756,12 +721,6 @@ FAQ
curl --header "Host: www.example.com" http://127.0.0.1/
You can also opt to add faked host name entries to curl with the --resolve
option. That has the added benefit that things like redirects will also work
properly. The above operation would instead be done as:
curl --resolve www.example.com:80:127.0.0.1 http://www.example.com/
3.20 How to SFTP from my user's home directory?
Contrary to how FTP works, SFTP and SCP URLs specify the exact directory to
@ -782,7 +741,7 @@ FAQ
When passing on a URL to curl to use, it may respond that the particular
protocol is not supported or disabled. The particular way this error message
is phrased is because curl doesn't make a distinction internally of whether
a particular protocol is not supported (i.e. never got any code added that
a particular protocol is not supported (ie never got any code added that
knows how to speak that protocol) or if it was explicitly disabled. curl can
be built to only support a given set of protocols, and the rest would then
be disabled or not supported.
@ -791,34 +750,6 @@ FAQ
part as in "htpt://example.com" or as in the less evident case if you prefix
the protocol part with a space as in " http://example.com/".
3.22 curl -X gives me HTTP problems
In normal circumstances, -X should hardly ever be used.
By default you use curl without explicitly saying which request method to
use when the URL identifies a HTTP transfer. If you just pass in a URL like
"curl http://example.com" it will use GET. If you use -d or -F curl will use
POST, -I will cause a HEAD and -T will make it a PUT.
If for whatever reason you're not happy with these default choices that curl
does for you, you can override those request methods by specifying -X
[WHATEVER]. This way you can for example send a DELETE by doing "curl -X
DELETE [URL]".
It is thus pointless to do "curl -XGET [URL]" as GET would be used
anyway. In the same vein it is pointless to do "curl -X POST -d data
[URL]"... But you can make a fun and somewhat rare request that sends a
request-body in a GET request with something like "curl -X GET -d data
[URL]"
Note that -X doesn't actually change curl's behavior as it only modifies the
actual string sent in the request, but that may of course trigger a
different set of events.
Accordingly, by using -XPOST on a command line that for example would follow
a 303 redirect, you will effectively prevent curl from behaving
correctly. Be aware.
4. Running Problems
@ -849,13 +780,12 @@ FAQ
curl 'http://www.altavista.com/cgi-bin/query?text=yes&q=curl'
In Windows, the standard DOS shell treats the percent sign specially and you
need to use TWO percent signs for each single one you want to use in the
URL.
In Windows, the standard DOS shell treats the %-symbol specially and you
need to use TWO %-symbols for each single one you want to use in the URL.
If you want a literal percent sign to be part of the data you pass in a POST
using -d/--data you must encode it as '%25' (which then also needs the
percent sign doubled on Windows machines).
Also note that if you want the literal %-symbol to be part of the data you
pass in a POST using -d/--data you must encode it as '%25' (which then also
needs the %-symbol doubled on Windows machines).
4.3 How can I use {, }, [ or ] to specify multiple URLs?
@ -897,7 +827,7 @@ FAQ
4.5.3 "403 Forbidden"
The server understood the request, but is refusing to fulfil it.
The server understood the request, but is refusing to fulfill it.
Authorization will not help and the request SHOULD NOT be repeated.
4.5.4 "404 Not Found"
@ -968,8 +898,8 @@ FAQ
4.9 Curl can't authenticate to the server that requires NTLM?
NTLM support requires OpenSSL, GnuTLS, NSS, Secure Transport, or Microsoft
Windows libraries at build-time to provide this functionality.
NTLM support requires OpenSSL, GnuTLS, NSS or Microsoft Windows libraries at
build-time to provide this functionality.
NTLM is a Microsoft proprietary protocol. Proprietary formats are evil. You
should not use such ones.
@ -1012,7 +942,7 @@ FAQ
this check.
Details are also in the SSLCERTS file in the release archives, found online
here: https://curl.haxx.se/docs/sslcerts.html
here: http://curl.haxx.se/docs/sslcerts.html
4.13 Why is curl -R on Windows one hour off?
@ -1024,13 +954,13 @@ FAQ
4.14 Redirects work in browser but not with curl!
curl supports HTTP redirects fine (see item 3.8). Browsers generally support
at least two other ways to perform redirects that curl does not:
at least two other ways to perform directs that curl does not:
Meta tags. You can write a HTML tag that will cause the browser to redirect
to another given URL after a certain time.
- Meta tags. You can write a HTML tag that will cause the browser to
redirect to another given URL after a certain time.
Javascript. You can write a Javascript program embedded in a HTML page that
redirects the browser to another given URL.
- Javascript. You can write a Javascript program embedded in a HTML page
that redirects the browser to another given URL.
There is no way to make curl follow these redirects. You must either
manually figure out what the page is set to do, or you write a script that
@ -1074,7 +1004,7 @@ FAQ
timeout is set.
See option TcpMaxConnectRetransmissions on this page:
https://support.microsoft.com/en-us/kb/175523/en-us
http://support.microsoft.com/?scid=kb%3Ben-us%3B175523&x=6&y=7
Also, even on non-Windows systems there may run a firewall or anti-virus
software or similar that accepts the connection but does not actually do
@ -1091,7 +1021,7 @@ FAQ
You'll find that even if D:\blah.txt does exist, cURL returns a 'file
not found' error.
According to RFC 1738 (https://www.ietf.org/rfc/rfc1738.txt),
According to RFC 1738 (http://www.faqs.org/rfcs/rfc1738.html),
file:// URLs must contain a host component, but it is ignored by
most implementations. In the above example, 'D:' is treated as the
host component, and is taken away. Thus, cURL tries to open '/blah.txt'.
@ -1111,15 +1041,15 @@ FAQ
4.19 Why doesn't cURL return an error when the network cable is unplugged?
Unplugging a cable is not an error situation. The TCP/IP protocol stack
Unplugging the cable is not an error situation. The TCP/IP protocol stack
was designed to be fault tolerant, so even though there may be a physical
break somewhere the connection shouldn't be affected, just possibly
delayed. Eventually, the physical break will be fixed or the data will be
re-routed around the physical problem through another path.
re-routed around the physical problem.
In such cases, the TCP/IP stack is responsible for detecting when the
network connection is irrevocably lost. Since with some protocols it is
perfectly legal for the client to wait indefinitely for data, the stack may
perfectly legal for the client wait indefinitely for data, the stack may
never report a problem, and even when it does, it can take up to 20 minutes
for it to detect an issue. The curl option --keepalive-time enables
keep-alive support in the TCP/IP stack which makes it periodically probe the
@ -1133,46 +1063,6 @@ FAQ
falls too low, and --connect-timeout and --max-time can be used to put an
overall timeout on the connection phase or the entire transfer.
A libcurl-using application running in a known physical environment (e.g.
an embedded device with only a single network connection) may want to act
immediately if its lone network connection goes down. That can be achieved
by having the application monitor the network connection on its own using an
OS-specific mechanism, then signalling libcurl to abort (see also item 5.13).
4.20 curl doesn't return error for HTTP non-200 responses!
Correct. Unless you use -f (--fail).
When doing HTTP transfers, curl will perform exactly what you're asking it
to do and if successful it will not return an error. You can use curl to
test your web server's "file not found" page (that gets 404 back), you can
use it to check your authentication protected web pages (that get a 401
back) and so on.
The specific HTTP response code does not constitute a problem or error for
curl. It simply sends and delivers HTTP as you asked and if that worked,
everything is fine and dandy. The response code generally provides higher
level error information that curl doesn't care about. The error was
not in the HTTP transfer.
If you want your command line to treat error codes in the 400 and up range
as errors and thus return a non-zero value and possibly show an error
message, curl has a dedicated option for that: -f (CURLOPT_FAILONERROR in
libcurl speak).
You can also use the -w option and the variable %{response_code} to extract
the exact response code that was returned in the response.
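For example (URLs made up), the first command exits with a non-zero code on
a 4xx/5xx response, while the second just prints the response code:
curl -f http://example.com/not-there.html
curl -s -o /dev/null -w "%{response_code}\n" http://example.com/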
4.21 Why is there a HTTP/1.1 in my HTTP/2 request?
If you use verbose to see the HTTP request when you send off a HTTP/2
request, it will still say 1.1.
The reason for this is that we first generate the request to send using the
old 1.1 style and show that request in the verbose output, and then we
convert it over to the binary header-compressed HTTP/2 style. The actual
"1.1" part from that request is then not actually used in the transfer. The
binary HTTP/2 headers are not human readable.
5. libcurl Issues
@ -1182,26 +1072,18 @@ FAQ
We have written the libcurl code specifically adjusted for multi-threaded
programs. libcurl will use thread-safe functions instead of non-safe ones if
your system has such. Note that you must never share the same handle in
multiple threads.
libcurl's implementation of timeouts might use signals (depending on what it
was built to use for name resolving), and signal handling is generally not
thread-safe. Multi-threaded applications that call libcurl from different
threads (on different handles) might want to use CURLOPT_NOSIGNAL, e.g.:
curl_easy_setopt(handle, CURLOPT_NOSIGNAL, 1L);
your system has such.
If you use a OpenSSL-powered libcurl in a multi-threaded environment, you
need to provide one or two locking functions:
https://www.openssl.org/docs/crypto/threads.html
http://www.openssl.org/docs/crypto/threads.html
If you use a GnuTLS-powered libcurl in a multi-threaded environment, you
need to provide locking function(s) for libgcrypt (which is used by GnuTLS
for the crypto functions).
https://web.archive.org/web/20111103083330/http://www.gnu.org/software/gnutls/manual/html_node/Multi_002dthreaded-applications.html
http://www.gnu.org/software/gnutls/manual/html_node/Multi_002dthreaded-applications.html
No special locking is needed with a NSS-powered libcurl. NSS is thread-safe.
@ -1270,11 +1152,6 @@ FAQ
libcurl will reuse connections for all transfers that are made using the
same libcurl handle.
When you use the easy interface, the connection cache is kept within the
easy handle. If you instead use the multi interface, the connection cache
will be kept within the multi handle and will be shared among all the easy
handles that are used within the same multi handle.
5.7 Link errors when building libcurl on Windows!
You need to make sure that your project, and all the libraries (both static
@ -1332,7 +1209,7 @@ FAQ
you want to change name resolver function you must rebuild libcurl and tell
it to use a different function.
- The non-IPv6 resolver that can use one out of four host name resolve calls
- The non-ipv6 resolver that can use one out of four host name resolve calls
(depending on what your system supports):
A - gethostbyname()
@ -1340,15 +1217,15 @@ FAQ
C - gethostbyname_r() with 5 arguments
D - gethostbyname_r() with 6 arguments
- The IPv6-resolver that uses getaddrinfo()
- The ipv6-resolver that uses getaddrinfo()
- The c-ares based name resolver that uses the c-ares library for resolves.
Using this offers asynchronous name resolves.
- The threaded resolver (default option on Windows). It uses:
A - gethostbyname() on plain IPv4 hosts
B - getaddrinfo() on IPv6 enabled hosts
A - gethostbyname() on plain ipv4 hosts
B - getaddrinfo() on ipv6-enabled hosts
Also note that libcurl never resolves or reverse-lookups addresses given as
pure numbers, such as 127.0.0.1 or ::1.
@ -1366,32 +1243,32 @@ FAQ
5.12 Can I make libcurl fake or hide my real IP address?
No. libcurl operates on a higher level. Besides, faking IP address would
imply sending IP packet with a made-up source address, and then you normally
get a problem with receiving the packet sent back as they would then not be
routed to you!
No. libcurl operates on a higher level than so. Besides, faking IP address
would imply sending IP packages with a made-up source address, and then you
normally get a problem with intercepting the packages sent back as they
would then not be routed to you!
If you use a proxy to access remote sites, the sites will not see your local
IP address but instead the address of the proxy.
Also note that on many networks NATs or other IP-munging techniques are used
that makes you see and use a different IP address locally than what the
remote server will see you coming from. You may also consider using
https://www.torproject.org/ .
remote server will see you coming from.
5.13 How do I stop an ongoing transfer?
With the easy interface you make sure to return the correct error code from
one of the callbacks, but none of them are instant. There is no function you
can call from another thread or similar that will stop it immediately.
Instead, you need to make sure that one of the callbacks you use returns an
appropriate value that will stop the transfer. Suitable callbacks that you
can do this with include the progress callback, the read callback and the
write callback.
Instead you need to make sure that one of the callbacks you use return an
appropriate value that will stop the transfer.
Suitable callbacks that you can do this with include the progress callback,
the read callback and the write callback.
If you're using the multi interface, you can also stop a transfer by
removing the particular easy handle from the multi stack at any moment you
think the transfer is done or when you wish to abort the transfer.
removing the particular easy handle from the multi stack. At any moment you
think the transfer is done.
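A minimal sketch of aborting from the progress callback ('handle' is assumed
to be an already-created easy handle; a real callback would return non-zero
only when its own abort condition is met):
static int stop_now(void *clientp, double dltotal, double dlnow,
                    double ultotal, double ulnow)
{
  /* returning a non-zero value makes libcurl abort the transfer */
  return 1;
}
curl_easy_setopt(handle, CURLOPT_PROGRESSFUNCTION, stop_now);
curl_easy_setopt(handle, CURLOPT_NOPROGRESS, 0L);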
5.14 Using C++ non-static functions for callbacks?
@ -1429,48 +1306,10 @@ FAQ
to do "LIST -a" or similar to see them.
The application thus needs to parse the LIST output. One such existing
list parser is available at https://cr.yp.to/ftpparse.html Versions of
list parser is available at http://cr.yp.to/ftpparse.html Versions of
libcurl since 7.21.0 also provide the ability to specify a wildcard to
download multiple files from one FTP directory.
5.16 I want a different time-out!
Time and time again users realize that CURLOPT_TIMEOUT and
CURLOPT_CONNECTIMEOUT are not sufficiently advanced or flexible to cover all
the various use cases and scenarios applications end up with.
libcurl offers many more ways to time-out operations. A common alternative
is to use the CURLOPT_LOW_SPEED_LIMIT and CURLOPT_LOW_SPEED_TIME options to
specify the lowest possible speed to accept before considering the transfer
timed out.
The most flexible way is by writing your own time-out logic and using
CURLOPT_PROGRESSFUNCTION (perhaps in combination with other callbacks) and
use that to figure out exactly when the right condition is met when the
transfer should get stopped.
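A minimal sketch of the low-speed approach ('handle' is an already-created
easy handle; the 100 bytes/second and 30 second values are arbitrary):
/* time out if the transfer goes slower than 100 bytes/sec for 30 seconds */
curl_easy_setopt(handle, CURLOPT_LOW_SPEED_LIMIT, 100L);
curl_easy_setopt(handle, CURLOPT_LOW_SPEED_TIME, 30L);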
5.17 Can I write a server with libcurl?
No. libcurl offers no functions or building blocks to build any kind of
internet protocol server. libcurl is only a client-side library. For server
libraries, you need to continue your search elsewhere but there exist many
good open source ones out there for most protocols you could possibly want a
server for. And there are really good stand-alone ones that have been tested
and proven for many years. There's no need for you to reinvent them!
5.18 Does libcurl use threads?
Put simply: no, libcurl will execute in the same thread you call it in. All
callbacks will be called in the same thread as the one you call libcurl in.
If you want to avoid having your thread blocked by the libcurl call, make
sure you use the non-blocking API which will do transfers asynchronously -
but still in the same single thread.
libcurl will potentially internally use threads for name resolving, if it
was built to work like that, but in those cases it'll create the child
threads by itself and they will only be used and then killed internally by
libcurl and never exposed to the outside.
6. License Issues
@ -1548,7 +1387,7 @@ FAQ
notice" somewhere. Most probably like in the documentation or in the section
where other third party dependencies already are mentioned and acknowledged.
As can be seen here: https://curl.haxx.se/docs/companies.html and elsewhere,
As can be seen here: http://curl.haxx.se/docs/companies.html and elsewhere,
more and more companies are discovering the power of libcurl and take
advantage of it even in commercial environments.
@ -1568,7 +1407,9 @@ FAQ
7.2 Who wrote PHP/CURL?
PHP/CURL was initially written by Sterling Hughes.
PHP/CURL is a module that comes with the regular PHP package. It depends and
uses libcurl, so you need to have libcurl installed properly first before
PHP/CURL can be used. PHP/CURL was initially written by Sterling Hughes.
7.3 Can I perform multiple requests using the same handle?
@ -1577,10 +1418,4 @@ FAQ
unknown to me).
After a transfer, you just set new options in the handle and make another
transfer. This will make libcurl re-use the same connection if it can.
7.4 Does PHP/CURL have dependencies?
PHP/CURL is a module that comes with the regular PHP package. It depends on
and uses libcurl, so you need to have libcurl installed properly before
PHP/CURL can be used.
transfer. This will make libcurl to re-use the same connection if it can.
docs/FEATURES
View File
@ -13,29 +13,27 @@ curl tool
- multiple file upload on a single command line
- custom maximum transfer rate
- redirectable stderr
- metalink support (*13)
libcurl
libcurl supports
- full URL syntax with no length limit
- custom maximum download time
- custom least download speed acceptable
- custom output result after completion
- guesses protocol from host name unless specified
- uses .netrc
- progress bar with time statistics while downloading
- progress bar/time specs while downloading
- "standard" proxy environment variables support
- compiles on win32 (reported builds on 40+ operating systems)
- selectable network interface for outgoing traffic
- IPv6 support on unix and Windows
- persistent connections
- socks 4 + 5 support, with or without local name resolving
- supports user name and password in proxy environment variables
- persistant connections
- socks5 support
- supports user name + password in proxy environment variables
- operations through proxy "tunnel" (using CONNECT)
- support for large files (>2GB and >4GB) during upload and download
- replaceable memory functions (malloc, free, realloc, etc)
- supports large files (>2GB and >4GB) both upload/download
- replacable memory functions (malloc, free, realloc, etc)
- asynchronous name resolving (*6)
- both a push and a pull style interface
- international domain names (*11)
HTTP
- HTTP/1.1 compliant (optionally uses 1.0)
@ -45,8 +43,8 @@ HTTP
- POST
- Pipelining
- multipart formpost (RFC1867-style)
- authentication: Basic, Digest, NTLM (*9) and Negotiate (SPNEGO) (*3)
to server and proxy
- authentication: Basic, Digest, NTLM(*9), GSS-Negotiate/Negotiate(*3) and
SPNEGO (*4) to server and proxy
- resume (both GET and PUT)
- follow redirects
- maximum amount of redirects to follow
@ -55,16 +53,14 @@ HTTP
- reads/writes the netscape cookie file format
- custom headers (replace/remove internally generated headers)
- custom user-agent string
- custom referrer string
- custom referer string
- range
- proxy authentication
- time conditions
- via http-proxy
- retrieve file modification date
- Content-Encoding support for deflate and gzip
- "Transfer-Encoding: chunked" support in uploads
- data compression (*12)
- HTTP/2 (*5)
- "Transfer-Encoding: chunked" support for "uploads"
HTTPS (*1)
- (all the HTTP features)
@ -72,12 +68,12 @@ HTTPS (*1)
- verify server certificate
- via http-proxy
- select desired encryption
- force usage of a specific SSL version (SSLv2 (*7), SSLv3 (*10) or TLSv1)
- force usage of a specific SSL version (SSLv2(*7), SSLv3 or TLSv1)
FTP
- download
- authentication
- Kerberos 5 (*14)
- kerberos4 (*5), kerberos5 (*3)
- active/passive using PORT, EPRT, PASV or EPSV
- single file size information (compare to HTTP HEAD)
- 'type=' URL support
@ -97,7 +93,7 @@ FTP
FTPS (*1)
- implicit ftps:// support that use SSL on both connections
- explicit "AUTH TLS" and "AUTH SSL" usage to "upgrade" plain ftp://
- explicit "AUTH TSL" and "AUTH SSL" usage to "upgrade" plain ftp://
connection to use SSL for both or one of the connections
SCP (*8)
@ -108,8 +104,7 @@ SFTP (*8)
- with custom commands sent before/after the transfer
TFTP
- download
- upload
- download / upload
TELNET
- connection negotiation
@ -124,83 +119,18 @@ DICT
FILE
- URL support
- upload
- "uploads"
- resume
SMB
- SMBv1 over TCP and SSL
- download
- upload
- authentication with NTLMv1
SMTP
- authentication: Plain, Login, CRAM-MD5, Digest-MD5, NTLM (*9), Kerberos 5
(*4) and External.
- send e-mails
- mail from support
- mail size support
- mail auth support for trusted server-to-server relaying
- multiple recipients
- via http-proxy
SMTPS (*1)
- implicit smtps:// support
- explicit "STARTTLS" usage to "upgrade" plain smtp:// connections to use SSL
- via http-proxy
POP3
- authentication: Clear Text, APOP and SASL
- SASL based authentication: Plain, Login, CRAM-MD5, Digest-MD5, NTLM (*9),
Kerberos 5 (*4) and External.
- list e-mails
- retrieve e-mails
- enhanced command support for: CAPA, DELE, TOP, STAT, UIDL and NOOP via
custom requests
- via http-proxy
POP3S (*1)
- implicit pop3s:// support
- explicit "STLS" usage to "upgrade" plain pop3:// connections to use SSL
- via http-proxy
IMAP
- authentication: Clear Text and SASL
- SASL based authentication: Plain, Login, CRAM-MD5, Digest-MD5, NTLM (*9),
Kerberos 5 (*4) and External.
- list the folders of a mailbox
- select a mailbox with support for verifying the UIDVALIDITY
- fetch e-mails with support for specifying the UID and SECTION
- upload e-mails via the append command
- enhanced command support for: EXAMINE, CREATE, DELETE, RENAME, STATUS,
STORE, COPY and UID via custom requests
- via http-proxy
IMAPS (*1)
- implicit imaps:// support
- explicit "STARTTLS" usage to "upgrade" plain imap:// connections to use SSL
- via http-proxy
FOOTNOTES
=========
*1 = requires OpenSSL, GnuTLS, NSS, yassl, axTLS, PolarSSL, WinSSL (native
Windows), Secure Transport (native iOS/OS X) or GSKit (native IBM i)
*1 = requires OpenSSL, GnuTLS, NSS, yassl, axTLS or PolarSSL
*2 = requires OpenLDAP
*3 = requires a GSS-API implementation (such as Heimdal or MIT Kerberos) or
SSPI (native Windows)
*4 = requires a GSS-API implementation, however, only Windows SSPI is
currently supported
*5 = requires nghttp2 and possibly a recent TLS library
*3 = requires a GSSAPI-compliant library, such as Heimdal or similar.
*4 = requires FBopenssl
*5 = requires a krb4 library, such as the MIT one or similar.
*6 = requires c-ares
*7 = requires OpenSSL, NSS, GSKit, WinSSL or Secure Transport; GnuTLS, for
example, only supports SSLv3 and TLSv1
*7 = requires OpenSSL or NSS, as GnuTLS only supports SSLv3 and TLSv1
*8 = requires libssh2
*9 = requires OpenSSL, GnuTLS, NSS, yassl, Secure Transport or SSPI (native
Windows)
*10 = requires any of the SSL libraries in (*1) above other than axTLS, which
does not support SSLv3
*11 = requires libidn or Windows
*12 = requires libz
*13 = requires libmetalink, and either an Apple or Microsoft operating
system, or OpenSSL, or GnuTLS, or NSS
*14 = requires a GSS-API implementation (such as Heimdal or MIT Kerberos)
*9 = requires OpenSSL, GnuTLS, NSS or yassl
docs/HISTORY
View File
@ -4,30 +4,22 @@
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
How cURL Became Like This
=========================
How cURL Became Like This
Towards the end of 1996, Daniel Stenberg was spending time writing an IRC bot
for an Amiga related channel on EFnet. He then came up with the idea to make
In the second half of 1997, Daniel Stenberg came up with the idea to make
currency-exchange calculations available to Internet Relay Chat (IRC)
users. All the necessary data are published on the Web; he just needed to
automate their retrieval.
Daniel simply adopted an existing command-line open-source tool, httpget, that
Brazilian Rafael Sagula had written and recently release version 0.1 of. After
a few minor adjustments, it did just what he needed.
Brazilian Rafael Sagula had written. After a few minor adjustments, it did
just what he needed.
1997
----
HttpGet 1.0 was released on April 8th 1997 with brand new HTTP proxy support.
We soon found and fixed support for getting currencies over GOPHER. Once FTP
download support was added, the name of the project was changed and urlget 2.0
was released in August 1997. The http-only days were already passed.
1998
----
Soon, he found currencies on a GOPHER site, so support for that had to go in,
and not before long FTP download support was added as well. The name of the
project was changed to urlget to better fit what it actually did now, since
the http-only days were already passed.
The project slowly grew bigger. When upload capabilities were added and the
name once again was misleading, a second name change was made and on March 20,
@ -41,39 +33,33 @@ was revealed to us much later.)
SSL support was added, powered by the SSLeay library.
August, first announcement of curl on freshmeat.net.
August 1998, first announcement of curl on freshmeat.net.
October, with the curl 4.9 release and the introduction of cookie support,
curl was no longer released under the GPL license. Now we're at 4000 lines of
code, we switched over to the MPL license to restrict the effects of
October 1998, with the curl 4.9 release and the introduction of cookie
support, curl was no longer released under the GPL license. Now we're at 4000
lines of code, we switched over to the MPL license to restrict the effects of
"copyleft".
November, configure script and reported successful compiles on several
November 1998, configure script and reported successful compiles on several
major operating systems. The never-quite-understood -F option was added and
curl could now simulate quite a lot of a browser. TELNET support was added.
Curl 5 was released in December 1998 and introduced the first ever curl man
page. People started making Linux RPM packages out of it.
1999
----
January, DICT support added.
January 1999, DICT support added.
OpenSSL took over where SSLeay was abandoned.
May, first Debian package.
May 1999, first Debian package.
August, LDAP:// and FILE:// support added. The curl web site gets 1300 visits
weekly. Moved site to curl.haxx.nu.
August 1999, LDAP:// and FILE:// support added. The curl web site gets 1300
visits weekly.
Released curl 6.0 in September. 15000 lines of code.
December 28, added the project on Sourceforge and started using its services
for managing the project.
2000
----
December 28 1999, added the project on Sourceforge and started using its
services for managing the project.
Spring 2000, major internal overhaul to provide a suitable library interface.
The first non-beta release was named 7.1 and arrived in August. This offered
@ -81,24 +67,19 @@ the easy interface and turned out to be the beginning of actually getting
other software and programs to get based on and powered by libcurl. Almost
20000 lines of code.
June 2000: the curl site moves to "curl.haxx.se"
August, the curl web site gets 4000 visits weekly.
August 2000, the curl web site gets 4000 visits weekly.
The PHP guys adopted libcurl already the same month, when the first ever third
party libcurl binding showed up. CURL has been a supported module in PHP since
the release of PHP 4.0.2. This would soon get followers. More than 16
different bindings exist at the time of this writing.
September, kerberos4 support was added.
September 2000, kerberos4 support was added.
In November started the work on a test suite for curl. It was later re-written
from scratch again. The libcurl major SONAME number was set to 1.
In November 2000 started the work on a test suite for curl. It was later
re-written from scratch again. The libcurl major SONAME number was set to 1.
2001
----
January, Daniel released curl 7.5.2 under a new license again: MIT (or
January 2001, Daniel released curl 7.5.2 under a new license again: MIT (or
MPL). The MIT license is extremely liberal and can be used combined with GPL
in other projects. This would finally put an end to the "complaints" from
people involved in GPLed projects that previously were prohibited from using
@ -111,20 +92,17 @@ code. The libcurl major SONAME number was bumped to 2 due to this overhaul.
The first experimental ftps:// support was added in March 2001.
August. curl is bundled in Mac OS X, 10.1. It was already becoming more and
more of a standard utility of Linux distributions and a regular in the BSD
August 2001. curl is bundled in Mac OS X, 10.1. It was already becoming more
and more of a standard utility of Linux distributions and a regular in the BSD
ports collections. The curl web site gets 8000 visits weekly. Curl Corporation
contacted Daniel to discuss "the name issue". After Daniel's reply, they have
never since got in touch again.
September, libcurl 7.9 introduces cookie jar and curl_formadd(). During the
forthcoming 7.9.x releases, we introduced the multi interface slowly and
September 2001, libcurl 7.9 introduces cookie jar and curl_formadd(). During
the forthcoming 7.9.x releases, we introduced the multi interface slowly and
without much whistles.
2002
----
June, the curl web site gets 13000 visits weekly. curl and libcurl is
June 2002, the curl web site gets 13000 visits weekly. curl and libcurl is
35000 lines of code. Reported successful compiles on more than 40 combinations
of CPUs and operating systems.
@ -133,152 +111,134 @@ impossible. Around 5000 downloaded packages each week from the main site gives
a hint, but the packages are mirrored extensively, bundled with numerous OS
distributions and otherwise retrieved as part of other software.
September, with the release of curl 7.10 it is released under the MIT license
only.
September 2002, with the release of curl 7.10 it is released under the MIT
license only.
2003
----
January 2003. Started working on the distributed curl tests. The autobuilds.
January. Started working on the distributed curl tests. The autobuilds.
February, the curl site averages at 20000 visits weekly. At any given moment,
there's an average of 3 people browsing the curl.haxx.se site.
February 2003, the curl site averages at 20000 visits weekly. At any given
moment, there's an average of 3 people browsing the curl.haxx.se site.
Multiple new authentication schemes are supported: Digest (May), NTLM (June)
and Negotiate (June).
November: curl 7.10.8 is released. 45000 lines of code. ~55000 unique visitors
to the curl.haxx.se site. Five official web mirrors.
November 2003: curl 7.10.8 is released. 45000 lines of code. ~55000 unique
visitors to the curl.haxx.se site. Five official web mirrors.
December, full-fledged SSL for FTP is supported.
December 2003, full-fledged SSL for FTP is supported.
2004
----
January 2004: curl 7.11.0 introduced large file support.
January: curl 7.11.0 introduced large file support.
June 2004:
June: curl 7.12.0 introduced IDN support. 10 official web mirrors.
curl 7.12.0 introduced IDN support. 10 official web mirrors.
This release bumped the major SONAME to 3 due to the removal of the
curl_formparse() function
This release bumped the major SONAME to 3 due to the removal of the
curl_formparse() function
August: Curl and libcurl 7.12.1
August 2004:
Curl and libcurl 7.12.1
Public curl release number: 82
Releases counted from the very beginning: 109
Available command line options: 96
Available curl_easy_setopt() options: 120
Number of public functions in libcurl: 36
Amount of public web site mirrors: 12
Number of known libcurl bindings: 26
Public curl release number: 82
Releases counted from the very beginning: 109
Available command line options: 96
Available curl_easy_setopt() options: 120
Number of public functions in libcurl: 36
Amount of public web site mirrors: 12
Number of known libcurl bindings: 26
2005
----
April 2005:
April. GnuTLS can now optionally be used for the secure layer when curl is
built.
GnuTLS can now optionally be used for the secure layer when curl is built.
September: TFTP support was added.
September 2005:
More than 100,000 unique visitors of the curl web site. 25 mirrors.
TFTP support was added.
December: security vulnerability: libcurl URL Buffer Overflow
More than 100,000 unique visitors of the curl web site. 25 mirrors.
2006
----
December 2005:
January. We dropped support for Gopher. We found bugs in the implementation
that turned out having been introduced years ago, so with the conclusion that
nobody had found out in all this time we removed it instead of fixing it.
security vulnerability: libcurl URL Buffer Overflow
March: security vulnerability: libcurl TFTP Packet Buffer Overflow
January 2006:
April: Added the multi_socket() API
We dropped support for Gopher. We found bugs in the implementation that
turned out having been introduced years ago, so with the conclusion that
nobody had found out in all this time we removed it instead of fixing it.
September: The major SONAME number for libcurl was bumped to 4 due to the
removal of ftp third party transfer support.
March 2006:
November: Added SCP and SFTP support
security vulnerability: libcurl TFTP Packet Buffer Overflow
2007
----
April 2006:
February: Added support for the Mozilla NSS library to do the SSL/TLS stuff
Added the multi_socket() API
July: security vulnerability: libcurl GnuTLS insufficient cert verification
September 2006:
2008
----
The major SONAME number for libcurl was bumped to 4 due to the removal of
ftp third party transfer support.
November:
November 2006:
Command line options: 128
curl_easy_setopt() options: 158
Public functions in libcurl: 58
Known libcurl bindings: 37
Contributors: 683
Added SCP and SFTP support
February 2007:
Added support for the Mozilla NSS library to do the SSL/TLS stuff
July 2007:
security vulnerability: libcurl GnuTLS insufficient cert verification
November 2008:
Command line options: 128
curl_easy_setopt() options: 158
Public functions in libcurl: 58
Known libcurl bindings: 37
Contributors: 683
145,000 unique visitors. >100 GB downloaded.
2009
----
March 2009:
March: security vulnerability: libcurl Arbitrary File Access
security vulnerability: libcurl Arbitrary File Access
August: security vulnerability: libcurl embedded zero in cert name
August 2009:
December: Added support for IMAP, POP3 and SMTP
security vulnerability: libcurl embedded zero in cert name
2010
----
December 2009:
January: Added support for RTSP
Added support for IMAP, POP3 and SMTP
February: security vulnerability: libcurl data callback excessive length
January 2010:
March: The project switched over to use git (hosted by github) instead of CVS
for source code control
Added support for RTSP
May: Added support for RTMP
February 2010:
Added support for PolarSSL to do the SSL/TLS stuff
security vulnerability: libcurl data callback excessive length
August:
March 2010:
Public curl releases: 117
Command line options: 138
curl_easy_setopt() options: 180
Public functions in libcurl: 58
Known libcurl bindings: 39
Contributors: 808
The project switched over to use git instead of CVS for source code control
May 2010:
Added support for RTMP
Added support for PolarSSL to do the SSL/TLS stuff
August 2010:
Public curl releases: 117
Command line options: 138
curl_easy_setopt() options: 180
Public functions in libcurl: 58
Known libcurl bindings: 39
Contributors: 808
Gopher support added (re-added actually)
2012
----
July: Added support for Schannel (native Windows TLS backend) and Darwin SSL
(Native Mac OS X and iOS TLS backend).
Supports metalink
October: SSH-agent support.
2013
----
February: Cleaned up internals to always uses the "multi" non-blocking
approach internally and only expose the blocking API with a wrapper.
September: First small steps on supporting HTTP/2 with nghttp2.
October: Removed krb4 support.
December: Happy eyeballs.
2014
----
March: first real release supporting HTTP/2
September: Web site had 245,000 unique visitors and served 236GB data
@ -1,123 +0,0 @@
Updated: July 3, 2012 (https://curl.haxx.se/docs/http-cookies.html)
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
HTTP Cookies
1. HTTP Cookies
1.1 Cookie overview
1.2 Cookies saved to disk
1.3 Cookies with curl the command line tool
1.4 Cookies with libcurl
1.5 Cookies with javascript
==============================================================================
1. HTTP Cookies
1.1 Cookie overview
HTTP cookies are 'name=contents' pairs that a server tells the client to hold
and that the client then sends back to the server on subsequent requests to
the same domains/paths for which the cookies were set.
Cookies are either "session cookies", which typically are forgotten when the
session is over (often taken to mean when the browser quits), or they carry
expiration dates after which the client throws them away.
Cookies are set to the client with the Set-Cookie: header and are sent to
servers with the Cookie: header.
For a very long time, the only spec explaining how to use cookies was the
original Netscape spec from 1994: https://curl.haxx.se/rfc/cookie_spec.html
In 2011, RFC6265 (https://www.ietf.org/rfc/rfc6265.txt) was finally published
and details how cookies work within HTTP.
1.2 Cookies saved to disk
Netscape once created a file format for storing cookies on disk so that they
would survive browser restarts. curl adopted that file format to allow
sharing the cookies with browsers, only to see browsers move away from that
format. Modern browsers no longer use it, while curl still does.
The netscape cookie file format stores one cookie per physical line in the
file with a bunch of associated meta data, each field separated with
TAB. That file is called the cookiejar in curl terminology.
When libcurl saves a cookiejar, it creates a file header of its own in which
there is a URL mention that will link to the web version of this document.
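As an illustration only (the values below are made up, and in the real file
each field is separated by a TAB character), a single stored cookie line
carries the domain, a flag saying whether subdomains match, the path, a secure
flag, the expiry time as a unix timestamp (0 for session cookies), the name
and the contents:

    .example.com	TRUE	/	FALSE	0	sessionid	abc123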
1.3 Cookies with curl the command line tool
curl has a full cookie "engine" built in. If you just activate it, you can
have curl receive and send cookies exactly as mandated in the specs.
Command line options:
-b, --cookie
tell curl a file to read cookies from and start the cookie engine, or if
it isn't a file it will pass on the given string. -b name=var works and so
does -b cookiefile.
-j, --junk-session-cookies
when used in combination with -b, it will skip all "session cookies" on
load so as to appear to start a new cookie session.
-c, --cookie-jar
tell curl to start the cookie engine and write cookies to the given file
after the request(s)
1.4 Cookies with libcurl
libcurl offers several ways to enable and interface the cookie engine. These
options are the ones provided by the native API. libcurl bindings may offer
access to them using other means.
CURLOPT_COOKIE
Is used when you want to specify the exact contents of a cookie header to
send to the server.
CURLOPT_COOKIEFILE
Tell libcurl to activate the cookie engine, and to read the initial set of
cookies from the given file. Read-only.
CURLOPT_COOKIEJAR
Tell libcurl to activate the cookie engine, and when the easy handle is
closed save all known cookies to the given cookiejar file. Write-only.
CURLOPT_COOKIELIST
Provide detailed information about a single cookie to add to the internal
storage of cookies. Pass in the cookie as an HTTP header with all the
details set, or pass in a line from a netscape cookie file. This option
can also be used to flush the cookies etc.
CURLINFO_COOKIELIST
Extract cookie information from the internal cookie storage as a linked
list.
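As a minimal sketch of how an application might combine these options (the
file name and URL are just placeholders, error handling omitted):

    #include <curl/curl.h>

    int main(void)
    {
      CURL *curl = curl_easy_init();
      if(curl) {
        struct curl_slist *cookies = NULL;

        curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

        /* activate the cookie engine and read initial cookies from this file */
        curl_easy_setopt(curl, CURLOPT_COOKIEFILE, "cookies.txt");

        /* save all known cookies to this file when the handle is closed */
        curl_easy_setopt(curl, CURLOPT_COOKIEJAR, "cookies.txt");

        curl_easy_perform(curl);

        /* extract the current cookies as a linked list */
        curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies);
        /* ... inspect the list here ... */
        curl_slist_free_all(cookies);

        curl_easy_cleanup(curl);  /* this writes the cookiejar */
      }
      return 0;
    }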
1.5 Cookies with javascript
These days a lot of the web is built using javascript. The web browser loads
complete programs that render the page you see. These javascript programs
can also set and access cookies.
Since curl and libcurl are plain HTTP clients without any knowledge of or
capability to handle javascript, such cookies will not be detected or used.
Often, if you want to mimic what a browser does on such web sites, you can
record web browser HTTP traffic when using such a site and then repeat the
cookie operations using curl or libcurl.
@ -1,111 +0,0 @@
HTTP/2 with curl
================
[HTTP/2 Spec](https://www.rfc-editor.org/rfc/rfc7540.txt)
[http2 explained](https://daniel.haxx.se/http2/)
Build prerequisites
-------------------
- nghttp2
- OpenSSL, NSS, GnuTLS or PolarSSL with a new enough version
[nghttp2](https://nghttp2.org/)
-------------------------------
libcurl uses this 3rd party library for the low level protocol handling
parts. The reason for this is that HTTP/2 is much more complex at that layer
than HTTP/1.1 (which we implement on our own) and that nghttp2 is an already
existing and well-functioning library.
We require at least version 1.0.0.
Over an http:// URL
-------------------
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl will
include an upgrade header in the initial request to the host to allow
upgrading to HTTP/2.
Possibly we can later introduce an option that will cause libcurl to fail if
not possible to upgrade. Possibly we introduce an option that makes libcurl
use HTTP/2 at once over http://
Over an https:// URL
--------------------
If `CURLOPT_HTTP_VERSION` is set to `CURL_HTTP_VERSION_2_0`, libcurl will use
ALPN (or NPN) to negotiate which protocol to continue with. Possibly introduce
an option that will cause libcurl to fail if not possible to use HTTP/2.
`CURL_HTTP_VERSION_2TLS` was added in 7.47.0 as a way to ask libcurl to prefer
HTTP/2 for HTTPS but stick to 1.1 by default for plain old HTTP connections.
ALPN is the TLS extension that HTTP/2 is expected to use. NPN serves a similar
purpose; it predates ALPN and was used for SPDY, so early HTTP/2 servers were
implemented using NPN before ALPN support became widespread.
`CURLOPT_SSL_ENABLE_ALPN` and `CURLOPT_SSL_ENABLE_NPN` are offered to allow
applications to explicitly disable ALPN or NPN.
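As a hedged sketch (the URL is a placeholder), asking libcurl to attempt
HTTP/2 for a transfer, with ALPN and NPN left at their enabled defaults, could
look like this:

    CURL *curl = curl_easy_init();
    if(curl) {
      curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");

      /* request HTTP/2; libcurl falls back to HTTP/1.1 if negotiation fails */
      curl_easy_setopt(curl, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);

      /* both are enabled by default, shown here only for completeness */
      curl_easy_setopt(curl, CURLOPT_SSL_ENABLE_ALPN, 1L);
      curl_easy_setopt(curl, CURLOPT_SSL_ENABLE_NPN, 1L);

      curl_easy_perform(curl);
      curl_easy_cleanup(curl);
    }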
SSL libs
--------
The challenge is ALPN and NPN support across all our different SSL backends.
You may need a fairly recent SSL library version for it to provide the
necessary TLS features. Right now we support:
- OpenSSL: ALPN and NPN
- NSS: ALPN and NPN
- GnuTLS: ALPN
- PolarSSL: ALPN
Multiplexing
------------
Starting in 7.43.0, libcurl fully supports HTTP/2 multiplexing, which is the
term for doing multiple independent transfers over the same physical TCP
connection.
To take advantage of multiplexing, you need to use the multi interface and set
`CURLMOPT_PIPELINING` to `CURLPIPE_MULTIPLEX`. With that bit set, libcurl will
attempt to re-use existing HTTP/2 connections and just add a new stream over
that when doing subsequent parallel requests.
While libcurl sets up a connection to an HTTP server there is a period during
which it doesn't know if it can pipeline or do multiplexing, and if you add new
transfers in that period, libcurl defaults to starting new connections for
those transfers. With the option `CURLOPT_PIPEWAIT` (added in 7.43.0), you can
ask that a transfer should rather wait and see in case there's a connection for
the same host in progress that might end up being possible to multiplex on. It
favours keeping the number of connections low at the cost of a slightly longer
time to first byte transferred.
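A small sketch of that setup with the multi interface (two transfers added up
front, URLs are placeholders, the event loop and error handling omitted):

    CURLM *multi = curl_multi_init();

    /* allow multiplexing streams over a single connection */
    curl_multi_setopt(multi, CURLMOPT_PIPELINING, CURLPIPE_MULTIPLEX);

    CURL *first = curl_easy_init();
    curl_easy_setopt(first, CURLOPT_URL, "https://example.com/one");
    curl_easy_setopt(first, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);
    /* prefer waiting for an existing connection to become multiplexable */
    curl_easy_setopt(first, CURLOPT_PIPEWAIT, 1L);
    curl_multi_add_handle(multi, first);

    CURL *second = curl_easy_init();
    curl_easy_setopt(second, CURLOPT_URL, "https://example.com/two");
    curl_easy_setopt(second, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_2_0);
    curl_easy_setopt(second, CURLOPT_PIPEWAIT, 1L);
    curl_multi_add_handle(multi, second);

    /* then drive both transfers with curl_multi_perform()/curl_multi_wait() */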
Applications
------------
We hide HTTP/2's binary nature and convert received HTTP/2 traffic to headers
in HTTP 1.1 style. This allows applications to work unmodified.
curl tool
---------
curl offers the `--http2` command line option to enable use of HTTP/2.
Since 7.47.0, the curl tool enables HTTP/2 by default for HTTPS connections.
HTTP Alternative Services
-------------------------
Alt-Svc is a suggested extension with a corresponding frame (ALTSVC) in HTTP/2
that tells the client about an alternative "route" to the same content for the
same origin server that you get the response from. A browser or long-living
client can use that hint to create a new connection asynchronously. For
libcurl, we may introduce a way to bring such clues to the application and/or
let a subsequent request use the alternate route
automatically. [Spec](https://tools.ietf.org/html/draft-ietf-httpbis-alt-svc-14)
TODO
----
- Implement "prior-knowledge" HTTP/2 connections over clear text so that
curl can connect with HTTP/2 at once without 1.1+Upgrade.
@ -14,16 +14,9 @@ Installing Binary Packages
binary package. This document describes how to compile, build and install
curl and libcurl from source code.
Building from git
=================
If you get your code off a git repository, see the GIT-INFO file in the
root directory for specific instructions on how to proceed.
Unix
UNIX
====
A normal Unix installation is made in three or four steps (after you've
A normal unix installation is made in three or four steps (after you've
unpacked the source archive):
./configure
@ -116,6 +109,18 @@ Unix
./configure --disable-thread
To build curl with kerberos4 support enabled, curl requires the krb4 libs
and headers installed. You can then use a set of options to tell
configure where those are:
--with-krb4-includes[=DIR] Specify location of kerberos4 headers
--with-krb4-libs[=DIR] Specify location of kerberos4 libs
--with-krb4[=DIR] where to look for Kerberos4
In most cases, /usr/athena is the install prefix and then it works with
./configure --with-krb4=/usr/athena
If you're a curl developer and use gcc, you might want to enable more
debug options with the --enable-debug option.
@ -124,31 +129,30 @@ Unix
default. But if you want to alter it, you can select how to deal with
each individual library.
To build with GnuTLS for SSL/TLS, use both --without-ssl and
--with-gnutls.
To build with GnuTLS support instead of OpenSSL for SSL/TLS, note that
you need to use both --without-ssl and --with-gnutls.
To build with Cyassl for SSL/TLS, use both --without-ssl and
--with-cyassl.
To build with yassl support instead of OpenSSL or GnuTLS, you must build
yassl with its OpenSSL emulation enabled and point to that directory root
with configure --with-ssl.
To build with NSS for SSL/TLS, use both --without-ssl and --with-nss.
To build with NSS support instead of OpenSSL for SSL/TLS, note that
you need to use both --without-ssl and --with-nss.
To build with PolarSSL for SSL/TLS, use both --without-ssl and
--with-polarssl.
To build with PolarSSL support instead of OpenSSL for SSL/TLS, note that
you need to use both --without-ssl and --with-polarssl.
To build with axTLS for SSL/TLS, use both --without-ssl and --with-axtls.
To build with axTLS support instead of OpenSSL for TLS, note that you
need to use both --without-ssl and --with-axtls.
To build with GSS-API support, use --with-gssapi and have the MIT Kerberos
or Heimdal packages installed.
To get GSSAPI support, build with --with-gssapi and have the MIT or
Heimdal Kerberos 5 packages installed.
To get support for SCP and SFTP, build with --with-libssh2 and have
libssh2 0.16 or later installed.
To get Metalink support, build with --with-libmetalink and have the
libmetalink packages installed.
SPECIAL CASES
-------------
Some versions of uClibc require configuring with CPPFLAGS=-D_GNU_SOURCE=1
to get correct large file support.
@ -157,6 +161,7 @@ Unix
./configure CC=owcc AR="$WATCOM/binl/wlib" AR_FLAGS=-q \
RANLIB=/bin/true STRIP="$WATCOM/binl/wstrip" CFLAGS=-Wextra
Win32
=====
@ -173,21 +178,19 @@ Win32
advice given above.
KB94248 - How To Use the C Run-Time
https://support.microsoft.com/kb/94248/en-us
http://support.microsoft.com/kb/94248/en-us
KB140584 - How to link with the correct C Run-Time (CRT) library
https://support.microsoft.com/kb/140584/en-us
http://support.microsoft.com/kb/140584/en-us
KB190799 - Potential Errors Passing CRT Objects Across DLL Boundaries
https://msdn.microsoft.com/en-us/library/ms235460
http://msdn.microsoft.com/en-us/library/ms235460
If your app is misbehaving in some strange way, or it is suffering
from memory corruption, before asking for further help, please try
first to rebuild every single library your app uses as well as your
app using the debug multithreaded dynamic C runtime.
If you get linkage errors read section 5.7 of the FAQ document.
MingW32
-------
@ -208,9 +211,9 @@ Win32
adjust as necessary. It is also possible to override these paths with
environment variables, for example:
set ZLIB_PATH=c:\zlib-1.2.8
set OPENSSL_PATH=c:\openssl-1.0.2c
set LIBSSH2_PATH=c:\libssh2-1.6.0
set ZLIB_PATH=c:\zlib-1.2.5
set OPENSSL_PATH=c:\openssl-0.9.8r
set LIBSSH2_PATH=c:\libssh2-1.2.7
ATTENTION: if you want to build with libssh2 support you have to use latest
version 0.17 - previous versions will NOT work with 7.17.0 and later!
@ -232,7 +235,8 @@ Win32
- optional MingW32-built OpenLDAP SDK available from:
http://www.gknw.net/mirror/openldap/
- optional recent Novell CLDAP SDK available from:
https://www.novell.com/developer/ndk/ldap_libraries_for_c.html
http://developer.novell.com/ndk/cldap.htm
Cygwin
------
@ -251,10 +255,8 @@ Win32
MSVC 6 caveats
--------------
If you use MSVC 6 it is required that you use the February 2003 edition of
the 'Platform SDK' which can be downloaded from:
https://www.microsoft.com/en-us/download/details.aspx?id=12261
If you use MSVC 6 it is required that you use the February 2003 edition PSDK:
http://www.microsoft.com/msdownload/platformsdk/sdkupdate/psdk-full.htm
Building any software with MSVC 6 without having PSDK installed is just
asking for trouble down the road once you have released it, you might notice
@ -262,8 +264,10 @@ Win32
choice of static vs dynamic runtime and third party libraries. Anyone using
software built in such way will at some point regret having done so.
When someone uses MSVC 6 without PSDK he is using a compiler back from 1998.
If the compiler has been updated with the installation of a service pack as
those mentioned in https://support.microsoft.com/kb/194022 the compiler can be
those mentioned in http://support.microsoft.com/kb/194022 the compiler can be
safely used to read source code, translate and make it object code.
But, even with the service packs mentioned above installed, the resulting
@ -271,6 +275,13 @@ Win32
header files and libraries with bugs and security issues which have already
been addressed and fixed long time ago.
In order to make use of the updated system headers and fixed libraries
for MSVC 6, it is required that 'Platform SDK', PSDK from now onwards,
is installed. The specific PSDK that must be installed for MSVC 6 is the
February 2003 edition, which is the latest one supporting the MSVC 6 compiler,
this PSDK is also known as 'Windows Server 2003 PSDK' and can be downloaded
from http://www.microsoft.com/msdownload/platformsdk/sdkupdate/psdk-full.htm
So, building curl and libcurl with MSVC 6 without PSDK is absolutely
discouraged for the benefit of anyone using software built in such
environment. And it will not be supported in any way, as we could just
@ -299,11 +310,11 @@ Win32
Then run 'nmake vc' in curl's root directory.
If you want to compile with zlib support, you will need to build
zlib (http://www.zlib.net/) as well. Please read the zlib
zlib (http://www.gzip.org/zlib/) as well. Please read the zlib
documentation on how to compile zlib. Define the ZLIB_PATH environment
variable to the location of zlib.h and zlib.lib, for example:
set ZLIB_PATH=c:\zlib-1.2.8
set ZLIB_PATH=c:\zlib-1.2.5
Then run 'nmake vc-zlib' in curl's root directory.
@ -317,7 +328,7 @@ Win32
Before running nmake define the OPENSSL_PATH environment variable with
the root/base directory of OpenSSL, for example:
set OPENSSL_PATH=c:\openssl-0.9.8zc
set OPENSSL_PATH=c:\openssl-0.9.8q
Then run 'nmake vc-ssl' or 'nmake vc-ssl-dll' in curl's root
directory. 'nmake vc-ssl' will create a libcurl static and dynamic
@ -332,18 +343,39 @@ Win32
at runtime.
Run 'nmake vc-ssl-zlib' to build with both ssl and zlib support.
MSVC IDE
--------
MSVC 6 IDE
----------
A fairly comprehensive set of Visual Studio project files are available for
v6.0 through v12.0 and are located in the projects folder to allow proper
building of both the libcurl library as well as the curl tool.
A minimal VC++ 6.0 reference workspace (vc6curl.dsw) is available with the
source distribution archive to allow proper building of the two included
projects, the libcurl library and the curl tool.
For more information about these projects and building via Visual Studio
please see the README file located in the projects folder.
1) Open the vc6curl.dsw workspace with MSVC6's IDE.
2) Select 'Build' from top menu.
3) Select 'Batch Build' from dropdown menu.
4) Make sure that the eight project configurations are 'checked'.
5) Click on the 'Build' button.
6) Once the eight project configurations are built you are done.
Dynamic and static libcurl libraries are built in debug and release flavours,
and can be located each one in its own subdirectory, DLL-Debug, DLL-Release,
LIB-Debug and LIB-Release, all of them below the 'lib' subdirectory.
In the same way four curl executables are created, each using its respective
library. The resulting curl executables are located in its own subdirectory,
DLL-Debug, DLL-Release, LIB-Debug and LIB-Release, below the 'src' subdir.
These reference VC++ 6.0 configurations are generated using the dynamic CRT.
Intentionally, these reference VC++ 6.0 projects and configurations don't use
third party libraries, such as OpenSSL or Zlib, to allow proper compilation
and configuration for all new users without further requirements.
If you need something more 'involved' you might adjust them for your own use,
or explore the world of makefiles described above 'MSVC from command line'.
Borland C++ compiler
--------------------
---------------------
Ensure that your build environment is properly set up to use the compiler
and associated tools. PATH environment variable must include the path to
@ -354,7 +386,7 @@ Win32
set BCCDIR=c:\Borland\BCC55
In order to build a plain vanilla version of curl and libcurl run the
In order to build a plain vanilla version of curl and libcurl run the
following command from curl's root directory:
make borland
@ -388,6 +420,7 @@ Win32
is required, as well as the OpenSSL libeay32.lib and ssleay32.lib
libraries.
OTHER MSVC IDEs
---------------
@ -398,6 +431,7 @@ Win32
Make the sources in the src/ drawer be a "win32 console application"
project. Name it curl.
Disabling Specific Protocols in Win32 builds
--------------------------------------------
@ -416,53 +450,16 @@ Win32
CURL_DISABLE_FILE disables FILE
CURL_DISABLE_TFTP disables TFTP
CURL_DISABLE_HTTP disables HTTP
CURL_DISABLE_IMAP disables IMAP
CURL_DISABLE_POP3 disables POP3
CURL_DISABLE_SMTP disables SMTP
If you want to set any of these defines you have the following options:
If you want to set any of these defines you have the following
possibilities:
- Modify lib/config-win32.h
- Modify lib/curl_setup.h
- Modify lib/setup.h
- Modify lib/Makefile.vc6
- Modify the "Preprocessor Definitions" in the libcurl project
- Add defines to Project/Settings/C/C++/General/Preprocessor Definitions
in the vc6libcurl.dsw/vc6libcurl.dsp Visual C++ 6 IDE project.
Note: The pre-processor settings can be found using the Visual Studio IDE
under "Project -> Settings -> C/C++ -> General" in VC6 and "Project ->
Properties -> Configuration Properties -> C/C++ -> Preprocessor" in later
versions.
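For instance (purely illustrative, pick whichever protocols you want to drop),
adding lines like these to lib/config-win32.h before building would compile
out TFTP and SMTP support:

    #define CURL_DISABLE_TFTP 1
    #define CURL_DISABLE_SMTP 1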
Using BSD-style lwIP instead of Winsock TCP/IP stack in Win32 builds
--------------------------------------------------------------------
In order to compile libcurl and curl using BSD-style lwIP TCP/IP stack
it is necessary to make definition of preprocessor symbol USE_LWIPSOCK
visible to libcurl and curl compilation processes. To set this definition
you have the following alternatives:
- Modify lib/config-win32.h and src/config-win32.h
- Modify lib/Makefile.vc6
- Modify the "Preprocessor Definitions" in the libcurl project
Note: The pre-processor settings can be found using the Visual Studio IDE
under "Project -> Settings -> C/C++ -> General" in VC6 and "Project ->
Properties -> Configuration Properties -> C/C++ -> Preprocessor" in later
versions.
Once that libcurl has been built with BSD-style lwIP TCP/IP stack support,
in order to use it with your program it is mandatory that your program
includes lwIP header file <lwip/opt.h> (or another lwIP header that includes
this) before including any libcurl header. Your program does not need the
USE_LWIPSOCK preprocessor definition which is for libcurl internals only.
Compilation has been verified with lwIP 1.4.0 and contrib-1.4.0 from:
http://download.savannah.gnu.org/releases/lwip/lwip-1.4.0.zip
http://download.savannah.gnu.org/releases/lwip/contrib-1.4.0.zip
This BSD-style lwIP TCP/IP stack support must be considered experimental
given that it has been verified that lwIP 1.4.0 still needs some polish,
and libcurl might yet need some additional adjustment, caveat emptor.
Important static libcurl usage note
-----------------------------------
@ -471,50 +468,9 @@ Win32
add '-DCURL_STATICLIB' to your CFLAGS. Otherwise the linker will look for
dynamic import symbols.
Legacy Windows and SSL
----------------------
WinSSL (specifically SChannel from Windows SSPI), is the native SSL library
in Windows. However, WinSSL in Windows <= XP is unable to connect to servers
that no longer support the legacy handshakes and algorithms used by those
versions. If you will be using curl in one of those earlier versions of
Windows you should choose another SSL backend such as OpenSSL.
Apple iOS and Mac OS X
======================
On recent Apple operating systems, curl can be built to use Apple's
SSL/TLS implementation, Secure Transport, instead of OpenSSL. To build with
Secure Transport for SSL/TLS, use the configure option --with-darwinssl. (It
is not necessary to use the option --without-ssl.) This feature requires iOS
5.0 or later, or OS X 10.5 ("Leopard") or later.
When Secure Transport is in use, the curl options --cacert and --capath and
their libcurl equivalents, will be ignored, because Secure Transport uses
the certificates stored in the Keychain to evaluate whether or not to trust
the server. This, of course, includes the root certificates that ship with
the OS. The --cert and --engine options, and their libcurl equivalents, are
currently unimplemented in curl with Secure Transport.
For OS X users: In OS X 10.8 ("Mountain Lion"), Apple made a major
overhaul to the Secure Transport API that, among other things, added
support for the newer TLS 1.1 and 1.2 protocols. To get curl to support
TLS 1.1 and 1.2, you must build curl on Mountain Lion or later, or by
using the equivalent SDK. If you set the MACOSX_DEPLOYMENT_TARGET
environmental variable to an earlier version of OS X prior to building curl,
then curl will use the new Secure Transport API on Mountain Lion and later,
and fall back on the older API when the same curl binary is executed on
older cats. For example, running these commands in curl's directory in the
shell will build the code such that it will run on cats as old as OS X 10.6
("Snow Leopard") (using bash):
export MACOSX_DEPLOYMENT_TARGET="10.6"
./configure --with-darwinssl
make
IBM OS/2
========
Building under OS/2 is not much different from building under unix.
You need:
@ -542,15 +498,15 @@ IBM OS/2
If you're getting huge binaries, probably your makefiles have the -g in
CFLAGS.
VMS
===
(The VMS section is in whole contributed by the friendly Nico Baggus)
Curl seems to work with FTP & HTTP other protocols are not tested. (the
perl http/ftp testing server supplied as testing too cannot work on VMS
because vms has no concept of fork(). [ I tried to give it a whack, but
that's of no use.
thats of no use.
SSL stuff has not been ported.
@ -589,7 +545,6 @@ VMS
the name can be fetched from external or internal message libraries
Error code - the err codes assigned by the application
Sev. - severity: Even = error, off = non error
0 = Warning
1 = Success
2 = Error
@ -611,13 +566,12 @@ VMS
Compaq C V6.2-003 on OpenVMS Alpha V7.1-1H2
So far for porting notes as of:
13-jul-2001
N. Baggus
QNX
===
(This section was graciously brought to us by David Bentham)
As QNX is targeted for resource constrained environments, the QNX headers
@ -628,12 +582,11 @@ QNX
A good all-round solution to this is to override the default when building
libcurl, by overriding CFLAGS during configure, example
# configure CFLAGS='-DFD_SETSIZE=64 -g -O2'
RISC OS
=======
The library can be cross-compiled using gccsdk as follows:
CC=riscos-gcc AR=riscos-ar RANLIB='riscos-ar -s' ./configure \
@ -643,9 +596,9 @@ RISC OS
where riscos-gcc and riscos-ar are links to the gccsdk tools.
You can then link your program with curl/lib/.libs/libcurl.a
AmigaOS
=======
(This section was graciously brought to us by Diego Casorran)
To build cURL/libcurl on AmigaOS just type 'make amiga' ...
@ -665,19 +618,20 @@ AmigaOS
To enable SSL support, you need a OpenSSL native version (without ixemul),
you can find a precompiled package at http://amiga.sourceforge.net/OpenSSL/
NetWare
=======
To compile curl.nlm / libcurl.nlm you need:
- either any gcc / nlmconv, or CodeWarrior 7 PDK 4 or later.
- gnu make and awk running on the platform you compile on;
native Win32 versions can be downloaded from:
http://www.gknw.net/development/prgtools/
- recent Novell LibC or Novell CLib SDK available from:
https://www.novell.com/developer/ndk/
- recent Novell LibC SDK available from:
http://developer.novell.com/ndk/libc.htm
- or recent Novell CLib SDK available from:
http://developer.novell.com/ndk/clib.htm
- optional recent Novell CLDAP SDK available from:
https://www.novell.com/developer/ndk/ldap_libraries_for_c.html
http://developer.novell.com/ndk/cldap.htm
- optional zlib sources (static or dynamic linking with zlib.imp);
sources with NetWare Makefile can be obtained from:
http://www.gknw.net/mirror/zlib/
@ -685,7 +639,7 @@ NetWare
you can find precompiled packages at:
http://www.gknw.net/development/ossl/netware/
for CLIB-based builds OpenSSL 0.9.8h or later is required - earlier versions
don't support building with CLIB BSD sockets.
dont support buildunf with CLIB BSD sockets.
- optional SSH2 sources (version 0.17 or later);
Set a search path to your compiler, linker and tools; on Linux make
@ -706,11 +660,11 @@ NetWare
Builds automatically created 8 times a day from current git are here:
http://www.gknw.net/mirror/curl/autobuilds/
the status of these builds can be viewed at the autobuild table:
https://curl.haxx.se/dev/builds.html
http://curl.haxx.se/dev/builds.html
eCos
====
curl does not use the eCos build system, so you must first build eCos
separately, then link curl to the resulting eCos library. Here's a sample
configure line to do so on an x86 Linux box targeting x86:
@ -778,9 +732,9 @@ eCos
config.errors = stderr; /* default errors to stderr */
Minix
=====
curl can be compiled on Minix 3 using gcc or ACK (starting with
ver. 3.1.3). Ensure that GNU gawk and bash are both installed and
available in the PATH.
@ -810,9 +764,9 @@ Minix
make
chmem =256000 src/curl
Symbian OS
==========
The Symbian OS port uses the Symbian build system to compile. From the
packages/Symbian/group/ directory, run:
@ -823,16 +777,16 @@ Symbian OS
SDK doesn't include support for P.I.P.S., you will need to contact
your SDK vendor to obtain that first.
VxWorks
========
Build for VxWorks is performed using cross compilation.
That means you build on Windows machine using VxWorks tools and
run the built image on the VxWorks device.
To build libcurl for VxWorks you need:
- CYGWIN (free, https://cygwin.com/)
- CYGWIN (free, http://cygwin.com/)
- Wind River Workbench (commercial)
If you have CYGWIN and Workbench installed on you machine
@ -849,54 +803,14 @@ VxWorks
As a result the libcurl.a library should be created in the 'lib' folder.
To clean the build results type 'make -f ./Makefile.vxworks clean'.
Android
=======
See the build notes in the Android.mk file.
Method using the static makefile:
- see the build notes in the packages/Android/Android.mk file.
Method using a configure cross-compile (tested with Android NDK r7c, r8):
- prepare the toolchain of the Android NDK for standalone use; this can
be done by invoking the script:
./build/tools/make-standalone-toolchain.sh
which creates a usual cross-compile toolchain. Let's assume that you put
this toolchain below /opt then invoke configure with something like:
export PATH=/opt/arm-linux-androideabi-4.4.3/bin:$PATH
./configure --host=arm-linux-androideabi [more configure options]
make
- if you want to compile directly from our GIT repo you might run into
this issue with older automake stuff:
checking host system type...
Invalid configuration `arm-linux-androideabi':
system `androideabi' not recognized
configure: error: /bin/sh ./config.sub arm-linux-androideabi failed
this issue can be fixed with using more recent versions of config.sub
and config.guess which can be obtained here:
http://git.savannah.gnu.org/gitweb/?p=config.git;a=tree
you need to replace your system-own versions which usually can be
found in your automake folder:
find /usr -name config.sub
Wrapper for pkg-config:
- In order to make proper use of pkg-config so that configure is able to
find all dependencies you should create a wrapper script for pkg-config;
file /opt/arm-linux-androideabi-4.4.3/bin/arm-linux-androideabi-pkg-config:
#!/bin/sh
SYSROOT=$(dirname ${0%/*})/sysroot
export PKG_CONFIG_DIR=
export PKG_CONFIG_LIBDIR=${SYSROOT}/usr/local/lib/pkgconfig:${SYSROOT}/usr/share/pkgconfig
export PKG_CONFIG_SYSROOT_DIR=${SYSROOT}
exec pkg-config "$@"
also create a copy or symlink with name arm-unknown-linux-androideabi-pkg-config.
CROSS COMPILE
=============
(This section was graciously brought to us by Jim Duey, with additions by
Dan Fandrich)
@ -942,18 +856,17 @@ CROSS COMPILE
./configure --host=ARCH-OS
REDUCING SIZE
=============
There are a number of configure options that can be used to reduce the
size of libcurl for embedded applications where binary size is an
important factor. First, be sure to set the CFLAGS variable when
configuring with any relevant compiler optimization flags to reduce the
size of the binary. For gcc, this would mean at minimum the -Os option,
and potentially the -march=X, -mdynamic-no-pic and -flto options as well,
e.g.
and potentially the -march=X and -mdynamic-no-pic options as well, e.g.
./configure CFLAGS='-Os' LDFLAGS='-Wl,-Bsymbolic'...
./configure CFLAGS='-Os' ...
Note that newer compilers often produce smaller code than older versions
due to improved optimization.
@ -971,22 +884,17 @@ REDUCING SIZE
--disable-ipv6 (disables support for IPv6)
--disable-manual (disables support for the built-in documentation)
--disable-proxy (disables support for HTTP and SOCKS proxies)
--disable-unix-sockets (disables support for UNIX sockets)
--disable-verbose (eliminates debugging strings and error code strings)
--disable-versioned-symbols (disables support for versioned symbols)
--enable-hidden-symbols (eliminates unneeded symbols in the shared library)
--without-libidn (disables support for the libidn DNS library)
--without-librtmp (disables support for RTMP)
--without-ssl (disables support for SSL/TLS)
--without-zlib (disables support for on-the-fly decompression)
The GNU compiler and linker have a number of options that can reduce the
size of the libcurl dynamic libraries on some platforms even further.
Specify them by providing appropriate CFLAGS and LDFLAGS variables on the
configure command-line, e.g.
CFLAGS="-Os -ffunction-sections -fdata-sections \
-fno-unwind-tables -fno-asynchronous-unwind-tables -flto" \
configure command-line:
CFLAGS="-ffunction-sections -fdata-sections" \
LDFLAGS="-Wl,-s -Wl,-Bsymbolic -Wl,--gc-sections"
Be sure also to strip debugging symbols from your binaries after
@ -996,9 +904,9 @@ REDUCING SIZE
.comment section).
Using these techniques it is possible to create a basic HTTP-only shared
libcurl library for i386 Linux platforms that is only 109 KiB in size, and
an FTP-only library that is 109 KiB in size (as of libcurl version 7.45.0,
using gcc 4.9.2).
libcurl library for i386 Linux platforms that is only 101 KiB in size, and
an FTP-only library that is 105 KiB in size (as of libcurl version 7.21.5,
using gcc 4.4.3).
You may find that statically linking libcurl to your application will
result in a lower total size than dynamically linking.
@ -1010,12 +918,13 @@ REDUCING SIZE
command line. Following is a list of appropriate key words:
--disable-cookies !cookies
--disable-crypto-auth !HTTP\ Digest\ auth !HTTP\ proxy\ Digest\ auth
--disable-manual !--manual
--disable-proxy !HTTP\ proxy !proxytunnel !SOCKS4 !SOCKS5
PORTS
=====
This is a probably incomplete list of known hardware and operating systems
that curl has been compiled for. If you know a system curl compiles and
runs on, that isn't listed, please let us know!
@ -1029,9 +938,9 @@ PORTS
- Alpha OpenVMS V7.1-1H2
- Alpha Tru64 v5.0 5.1
- AVR32 Linux
- ARM Android 1.5, 2.1, 2.3, 3.2, 4.x
- ARM Android 1.5, 2.1
- ARM INTEGRITY
- ARM iOS
- ARM iPhone OS
- Cell Linux
- Cell Cell OS
- HP-PA HP-UX 9.X 10.X 11.X
@ -1069,7 +978,6 @@ PORTS
- i386 HURD
- i386 Haiku OS
- i386 Linux 1.3, 2.0, 2.2, 2.3, 2.4, 2.6
- i386 Mac OS X
- i386 MINIX 3.1
- i386 NetBSD
- i386 Novell NetWare
@ -1096,18 +1004,17 @@ Useful URLs
axTLS http://axtls.sourceforge.net/
c-ares http://c-ares.haxx.se/
GNU GSS https://www.gnu.org/software/gss/
GnuTLS https://www.gnu.org/software/gnutls/
Heimdal http://www.h5l.org/
libidn https://www.gnu.org/software/libidn/
libmetalink https://launchpad.net/libmetalink/
GNU GSS http://www.gnu.org/software/gss/
GnuTLS http://www.gnu.org/software/gnutls/
Heimdal http://www.pdc.kth.se/heimdal/
libidn http://www.gnu.org/software/libidn/
libssh2 http://www.libssh2.org/
MIT Kerberos http://web.mit.edu/kerberos/www/dist/
NSS https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS
NSS http://www.mozilla.org/projects/security/pki/nss/
OpenLDAP http://www.openldap.org/
OpenSSL https://www.openssl.org/
PolarSSL https://tls.mbed.org/
wolfSSL https://www.wolfssl.com/wolfSSL/
OpenSSL http://www.openssl.org/
PolarSSL http://polarssl.org/
yassl http://www.yassl.com/
Zlib http://www.zlib.net/
MingW http://www.mingw.org/
docs/INSTALL.cmake Normal file → Executable file
@ -11,70 +11,35 @@ Building with CMake
This document describes how to compile, build and install curl and libcurl
from source code using the CMake build tool. To build with CMake, you will
of course have to first install CMake. The minimum required version of
CMake is specified in the file CMakeLists.txt found in the top of the curl
CMake is specifed in the file CMakeLists.txt found in the top of the curl
source tree. Once the correct version of CMake is installed you can follow
the instructions below for the platform you are building on.
CMake builds can be configured either from the command line, or from one
of CMake's GUI's.
Current flaws in the curl CMake build
=====================================
Missing features in the cmake build:
- Builds libcurl without large file support
- Can't select which SSL library to build with, only OpenSSL
- Doesn't build with SCP and SFTP support (libssh2)
- Doesn't allow different resolver backends (no c-ares build support)
- No RTMP support built
- Doesn't allow build curl and libcurl debug enabled
- Doesn't allow a custom CA bundle path
- Doesn't allow you to disable specific protocols from the build
- Doesn't find or use krb4 or GSS
- Rebuilds test files too eagerly, but still can't run the tests
Important notice
==================
If you got your curl sources from a distribution tarball, make sure to
delete the generic 'include/curl/curlbuild.h' file that comes with it:
rm -f curl/include/curl/curlbuild.h
The purpose of this file is to provide reasonable definitions for systems
where autoconfiguration is not available. CMake will create its own
version of this file in its build directory. If the "generic" version
is not deleted, weird build errors may occur on some systems.
Command Line CMake
==================
A CMake build of curl is similar to the autotools build of curl. It
A command line build of Curl is similar to the autotools build of Curl. It
consists of the following steps after you have unpacked the source.
1. Create an out of source build tree parallel to the curl source
tree and change into that directory
$ mkdir curl-build
$ cd curl-build
2. Run CMake from the build tree, giving it the path to the top of
the curl source tree. CMake will pick a compiler for you. If you
want to specify the compile, you can set the CC environment
variable prior to running CMake.
$ cmake ../curl
$ make
3. Install to default location:
$ make install
(The test suite does not work with the cmake build)
# 1st create an out of source build tree parallel to the curl source
# tree and change into that directory
mkdir curl-build
cd curl-build
# now run CMake from the build tree, giving it the path to the top of
# the Curl source tree. CMake will pick a compiler for you. If you
# want to specifiy the compile, you can set the CC environment
# variable prior to running CMake.
cmake ../curl
make
# currently make test and make install are not implemented
#make test
#make install
ccmake
=========
CMake comes with a curses based interface called ccmake. To run ccmake on
a curl use the instructions for the command line cmake, but substitute
a curl use the instructions for the command line cmake, but substitue
ccmake ../curl for cmake ../curl. This will bring up a curses interface
with instructions on the bottom of the screen. You can press the "c" key
to configure the project, and the "g" key to generate the project. After
@ -88,7 +53,7 @@ cmake-gui
the curl source tree.
2. Fill in the "Where to build the binaries" combo box with the path
to the directory for your build tree, ideally this should not be the
same as the source tree, but a parallel directory called curl-build or
same as the source tree, but a parallel diretory called curl-build or
something similar.
3. Once the source and binary directories are specified, press the
"Configure" button.
@ -96,5 +61,5 @@ cmake-gui
5. At this point you can change any of the options presented in the
GUI. Once you have selected all the options you want, click the
"Generate" button.
6. Run the native build tool that you used CMake to generate.
6. Run the native build tool that you used CMake to genratate.
@ -26,7 +26,7 @@ exists for a Unix/linux command line environments. This is of little help when
it comes to Windows O/S.
Secondly the help that does exist for the Windows O/S focused around mingw
through a command line argument environment.
thru a command line argument environment.
You may ask "Why is this a problem?"
@ -95,7 +95,7 @@ install instructions may produce erratic behaviour in DevCpp. For further info
check the following sites
http://aditsu.freeunixhost.com/dev-cpp-faq.html
https://sourceforge.net/p/dev-cpp/discussion/48211/thread/2a85ea46
http://sourceforge.net/forum/message.php?msg_id=3252213
As I have mentioned before I will confine this to the SSL Library compilations
but the process is very similar for compilation of the executable - curl.exe;
@ -248,7 +248,7 @@ SSL Files
openssl-0.9.7e-win32-bin.zip for the minimalist package of the openssl-0.9.7e
binaries ported to MS Windows 95/98/NT/XP using the MingW32/GCC-3.1
development environment. The file may be downloaded at
https://curl.haxx.se/download/.
http://curl.haxx.se/download/.
2- Open the above zip file. You will find two files - SDL.dll,
SDL_mixer.dll. Install them in the directory C:\WINDOWS\SYSTEM32 for Win 9x
@ -3,116 +3,45 @@ join in and help us correct one or more of these! Also be sure to check the
changelog of the current development status, as one or more of these problems
may have been fixed since this was written!
93. It is not possible to pass a 64-bit value using CURLFORM_CONTENTLEN with
CURLFORM_ARRAY, when compiled on 32-bit platforms that support 64-bit
integers. This is because the underlying structure 'curl_forms' uses a
dual-purpose char* for storing these values via casting. For more information
see the now-closed related issue:
https://github.com/curl/curl/issues/608
92. curl tool 7.47.1 in Windows will not --output to literal paths \\?\ or to
reserved dos device names unless the device prefix \\.\ is used. To send
output to a device that has a reserved dos device name you can use the
Windows device prefix (eg: --output \\.\NUL). You can also use the
redirection operator to send output to a literal path or a reserved device
name (eg: > NUL).
The next release of curl will support --output in Windows to literal paths
and to reserved device names without the device prefix.
https://github.com/curl/curl/commit/c3aac48
https://github.com/curl/curl/commit/4fc80f3
76. The SOCKET type in Win64 is 64 bits large (and thus so is curl_socket_t on
that platform), and long is only 32 bits. It makes it impossible for
curl_easy_getinfo() to return a socket properly with the CURLINFO_LASTSOCKET
option as for all other operating systems.
91. "curl_easy_perform hangs with imap and PolarSSL"
https://github.com/curl/curl/issues/334
75. NTLM authentication involving unicode user name or password.
http://curl.haxx.se/mail/lib-2009-10/0024.html
http://curl.haxx.se/bug/view.cgi?id=2944325
90. IMAP "SEARCH ALL" truncates output on large boxes. "A quick search of the
code reveals that pingpong.c contains some truncation code, at line 408,
when it deems the server response to be too large truncating it to 40
characters"
https://curl.haxx.se/bug/view.cgi?id=1366
89. Disabling HTTP Pipelining when there are ongoing transfers can lead to
heap corruption and crash. https://curl.haxx.se/bug/view.cgi?id=1411
88. libcurl doesn't support CURLINFO_FILETIME for SFTP transfers and thus
curl's -R option also doesn't work then.
87. -J/--remote-header-name doesn't decode %-encoded file names. RFC6266
details how it should be done. The can of worm is basically that we have no
charset handling in curl and ascii >=128 is a challenge for us. Not to
mention that decoding also means that we need to check for nastiness that is
attempted, like "../" sequences and the like. Probably everything to the left
of any embedded slashes should be cut off.
https://curl.haxx.se/bug/view.cgi?id=1294
86. The disconnect commands (LOGOUT and QUIT) may not be sent by IMAP, POP3
and SMTP if a failure occurs during the authentication phase of a
connection.
85. Wrong STARTTRANSFER timer accounting for POST requests
Timer works fine with GET requests, but while using POST the time for
CURLINFO_STARTTRANSFER_TIME is wrong. While using POST
CURLINFO_STARTTRANSFER_TIME minus CURLINFO_PRETRANSFER_TIME is near to zero
every time.
https://github.com/curl/curl/issues/218
https://curl.haxx.se/bug/view.cgi?id=1213
84. CURLINFO_SSL_VERIFYRESULT is only implemented for the OpenSSL and NSS
backends, so relying on this information in a generic app is flaky.
82. When building with the Windows Borland compiler, it fails because the
"tlib" tool doesn't support hyphens (minus signs) in file names and we have
such in the build.
https://curl.haxx.se/bug/view.cgi?id=1222
81. When using -J (with -O), automatically resumed downloading together with
"-C -" fails. Without -J the same command line works! This happens because
the resume logic is worked out before the target file name (and thus its
pre-transfer size) has been figured out!
https://curl.haxx.se/bug/view.cgi?id=1169
80. Curl doesn't recognize certificates in DER format in keychain, but it
works with PEM.
https://curl.haxx.se/bug/view.cgi?id=1065
79. SMTP. When sending data to multiple recipients, curl will abort and return
failure if one of the recipients indicate failure (on the "RCPT TO"
command). Ordinary mail programs would proceed and still send to the ones
that can receive data. This is subject for change in the future.
https://curl.haxx.se/bug/view.cgi?id=1116
75. NTLM authentication involving unicode user name or password only works
properly if built with UNICODE defined together with the WinSSL/schannel
backend. The original problem was mentioned in:
https://curl.haxx.se/mail/lib-2009-10/0024.html
https://curl.haxx.se/bug/view.cgi?id=896
The WinSSL/schannel version verified to work as mentioned in
https://curl.haxx.se/mail/lib-2012-07/0073.html
74. The HTTP spec allows headers to be merged and become comma-separated
instead of being repeated several times. This also includes Authenticate: and
Proxy-Authenticate: headers, and while this hardly ever happens in real life
it will confuse libcurl, which does not properly support it for all headers -
like those Authenticate headers.
73. If a connection is made to an FTP server but the server then just never
sends the 220 response or otherwise is dead slow, libcurl will not
acknowledge the connection timeout during that phase but only the "real"
timeout - which may surprise users as it is probably considered to be the
connect phase to most people. Brought up (and is being misunderstood) in:
https://curl.haxx.se/bug/view.cgi?id=856
http://curl.haxx.se/bug/view.cgi?id=2844077
72. "Pausing pipeline problems."
https://curl.haxx.se/mail/lib-2009-07/0214.html
http://curl.haxx.se/mail/lib-2009-07/0214.html
70. Problem re-using easy handle after call to curl_multi_remove_handle
https://curl.haxx.se/mail/lib-2009-07/0249.html
http://curl.haxx.se/mail/lib-2009-07/0249.html
68. "More questions about ares behavior".
https://curl.haxx.se/mail/lib-2009-08/0012.html
http://curl.haxx.se/mail/lib-2009-08/0012.html
67. When creating multipart formposts, the file name part can be encoded with
something beyond ascii but currently libcurl will only pass in the verbatim
string the app provides. There are several browsers that already do this
encoding. The key seems to be the updated draft to RFC2231:
https://tools.ietf.org/html/draft-reschke-rfc2231-in-http-02
http://tools.ietf.org/html/draft-reschke-rfc2231-in-http-02
66. When using telnet, the time limitation options don't work.
https://curl.haxx.se/bug/view.cgi?id=846
http://curl.haxx.se/bug/view.cgi?id=2818950
65. When doing FTP over a socks proxy or CONNECT through HTTP proxy and the
multi interface is used, libcurl will fail if the (passive) TCP connection
@ -123,27 +52,34 @@ may have been fixed since this was written!
63. When CURLOPT_CONNECT_ONLY is used, the handle cannot reliably be re-used
for any further requests or transfers. The work-around is then to close that
handle with curl_easy_cleanup() and create a new. Some more details:
https://curl.haxx.se/mail/lib-2009-04/0300.html
http://curl.haxx.se/mail/lib-2009-04/0300.html
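A rough sketch of that work-around (the URL is a placeholder):

    /* first handle, used only to set up the connection */
    CURL *curl = curl_easy_init();
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    curl_easy_setopt(curl, CURLOPT_CONNECT_ONLY, 1L);
    curl_easy_perform(curl);
    /* ... use curl_easy_send()/curl_easy_recv() on the connection ... */
    curl_easy_cleanup(curl);

    /* then create a fresh handle for any further transfers */
    curl = curl_easy_init();
    curl_easy_setopt(curl, CURLOPT_URL, "https://example.com/");
    curl_easy_perform(curl);
    curl_easy_cleanup(curl);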
61. If an upload using Expect: 100-continue receives an HTTP 417 response,
it ought to be automatically resent without the Expect:. A workaround is
for the client application to redo the transfer after disabling Expect:.
https://curl.haxx.se/mail/archive-2008-02/0043.html
http://curl.haxx.se/mail/archive-2008-02/0043.html
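One way for the application to disable the Expect: header before redoing the
transfer (a sketch; 'curl' is assumed to be the already configured easy handle
for the upload):

    struct curl_slist *hdrs = NULL;

    /* a header with an empty value removes the one libcurl would add itself */
    hdrs = curl_slist_append(hdrs, "Expect:");
    curl_easy_setopt(curl, CURLOPT_HTTPHEADER, hdrs);

    /* ... redo the upload ... */

    curl_slist_free_all(hdrs);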
60. libcurl closes the connection if an HTTP 401 reply is received while it
is waiting for the 100-continue response.
https://curl.haxx.se/mail/lib-2008-08/0462.html
http://curl.haxx.se/mail/lib-2008-08/0462.html
58. It seems sensible to be able to use CURLOPT_NOBODY and
CURLOPT_FAILONERROR with FTP to detect if a file exists or not, but it is
not working: https://curl.haxx.se/mail/lib-2008-07/0295.html
not working: http://curl.haxx.se/mail/lib-2008-07/0295.html
57. On VMS-Alpha: When using an http-file-upload the file is not sent to the
Server with the correct content-length. Sending a file with 511 or less
bytes, content-length 512 is used. Sending a file with 513 - 1023 bytes,
content-length 1024 is used. Files with a length of a multiple of 512 Bytes
show the correct content-length. Only these files work for upload.
http://curl.haxx.se/bug/view.cgi?id=2057858
56. When libcurl sends CURLOPT_POSTQUOTE commands when connected to a SFTP
server using the multi interface, the commands are not being sent correctly
and instead the connection is "cancelled" (the operation is considered done)
prematurely. There is a half-baked (busy-looping) patch provided in the bug
report but it cannot be accepted as-is. See
https://curl.haxx.se/bug/view.cgi?id=748
http://curl.haxx.se/bug/view.cgi?id=2006544
55. libcurl fails to build with MIT Kerberos for Windows (KfW) due to KfW's
library header files exporting symbols/macros that should be kept private
@ -152,13 +88,13 @@ may have been fixed since this was written!
52. Gautam Kachroo's issue that identifies a problem with the multi interface
where a connection can be re-used without actually being properly
SSL-negotiated:
https://curl.haxx.se/mail/lib-2008-01/0277.html
http://curl.haxx.se/mail/lib-2008-01/0277.html
49. If using --retry and the transfer timeouts (possibly due to using -m or
-y/-Y) the next attempt doesn't resume the transfer properly from what was
downloaded in the previous attempt but will truncate and restart at the
original position where it was at before the previous failed attempt. See
https://curl.haxx.se/mail/lib-2008-01/0080.html and Mandriva bug report
http://curl.haxx.se/mail/lib-2008-01/0080.html and Mandriva bug report
https://qa.mandriva.com/show_bug.cgi?id=22565
48. If a CONNECT response-headers are larger than BUFSIZE (16KB) when the
@ -167,41 +103,68 @@ may have been fixed since this was written!
protocol code. This should be very rare.
43. There seems to be a problem when connecting to the Microsoft telnet server.
https://curl.haxx.se/bug/view.cgi?id=649
http://curl.haxx.se/bug/view.cgi?id=1720605
41. When doing an operation over FTP that requires the ACCT command (but not
when logging in), the operation will fail since libcurl doesn't detect this
and thus fails to issue the correct command:
https://curl.haxx.se/bug/view.cgi?id=635
http://curl.haxx.se/bug/view.cgi?id=1693337
39. Steffen Rumler's Race Condition in Curl_proxyCONNECT:
https://curl.haxx.se/mail/lib-2007-01/0045.html
http://curl.haxx.se/mail/lib-2007-01/0045.html
38. Kumar Swamy Bhatt's problem in ftp/ssl "LIST" operation:
https://curl.haxx.se/mail/lib-2007-01/0103.html
http://curl.haxx.se/mail/lib-2007-01/0103.html
37. Having more than one connection to the same host when doing NTLM
authentication (which performs multiple "passes" and authenticates a
connection rather than an HTTP request), and particularly when using the
multi interface, there's a risk that libcurl will re-use a wrong connection
when doing the different passes in the NTLM negotiation and thus fail to
negotiate (in seemingly mysterious ways).
35. Both SOCKS5 and SOCKS4 proxy connections are done blocking, which is very
bad when used with the multi interface.
34. The SOCKS4 connection codes don't properly acknowledge (connect) timeouts.
Also see #12. According to bug #1556528, even the SOCKS5 connect code does
not do it right: https://curl.haxx.se/bug/view.cgi?id=604
not do it right: http://curl.haxx.se/bug/view.cgi?id=1556528,
31. "curl-config --libs" will include details set in LDFLAGS when configure is
run that might be needed only for building libcurl. Further, curl-config
--cflags suffers from the same effects with CFLAGS/CPPFLAGS.
30. You need to use -g to the command line tool in order to use RFC2732-style
IPv6 numerical addresses in URLs.
29. IPv6 URLs with zone ID are not nicely supported.
http://www.ietf.org/internet-drafts/draft-fenner-literal-zone-02.txt (expired)
specifies the use of a plus sign instead of a percent when specifying zone
IDs in URLs to get around the problem of percent signs being
special. According to the reporter, Firefox deals with the URL _with_ a
percent letter (which seems like a blatant URL spec violation).
libcurl supports zone IDs where the percent sign is URL-escaped (i.e. %25).
See http://curl.haxx.se/bug/view.cgi?id=1371118
26. NTLM authentication using SSPI (on Windows) when (lib)curl is running in
"system context" will make it use wrong(?) user name - at least when compared
to what winhttp does. See https://curl.haxx.se/bug/view.cgi?id=535
to what winhttp does. See http://curl.haxx.se/bug/view.cgi?id=1281867
23. SOCKS-related problems:
A) libcurl doesn't support SOCKS for IPv6.
B) libcurl doesn't support FTPS over a SOCKS proxy.
E) libcurl doesn't support active FTP over a SOCKS proxy
We probably have even more bugs and lack of features when a SOCKS proxy is
used.
22. Sending files to a FTP server using curl on VMS, might lead to curl
complaining on "unaligned file size" on completion. The problem is related
to VMS file structures and the perceived file sizes stat() returns. A
possible fix would involve sending a "STRU VMS" command.
http://curl.haxx.se/bug/view.cgi?id=1156287
21. FTP ASCII transfers do not follow RFC959. They don't convert the data
accordingly (not for sending nor for receiving). RFC 959 section 3.1.1.1
clearly describes how this should be done:
@ -221,7 +184,7 @@ may have been fixed since this was written!
be to use a data structure other than a plain C string, one that can handle
embedded NUL characters. From a practical standpoint, most FTP servers
would not meaningfully support NUL characters within RFC 959 <string>,
anyway (e.g., Unix pathnames may not contain NUL).
anyway (e.g., UNIX pathnames may not contain NUL).
14. Test case 165 might fail on a system which has libidn present, but with an
old iconv version (2.1.3 is a known bad version), since it doesn't recognize
@ -236,11 +199,11 @@ may have been fixed since this was written!
acknowledged after the actual TCP connect (during the SOCKS "negotiate"
phase).
10. To get HTTP Negotiate (SPNEGO) authentication to work fine, you need to
provide a (fake) user name (this concerns both curl and the lib) because the
code wrongly only considers authentication if there's a user name provided.
https://curl.haxx.se/bug/view.cgi?id=440 How?
https://curl.haxx.se/mail/lib-2004-08/0182.html
10. To get HTTP Negotiate authentication to work fine, you need to provide a
(fake) user name (this concerns both curl and the lib) because the code
wrongly only considers authentication if there's a user name provided.
http://curl.haxx.se/bug/view.cgi?id=1004841. How?
http://curl.haxx.se/mail/lib-2004-08/0182.html
8. Doing resumed upload over HTTP does not work with '-C -', because curl
doesn't do a HEAD first to get the initial size. This needs to be done
@ -256,4 +219,14 @@ may have been fixed since this was written!
5. libcurl doesn't treat the content-length of compressed data properly, as
it seems HTTP servers send the *uncompressed* length in that header and
libcurl thinks of it as the *compressed* length. Some explanations are here:
https://curl.haxx.se/mail/lib-2003-06/0146.html
http://curl.haxx.se/mail/lib-2003-06/0146.html
2. If a HTTP server responds to a HEAD request and includes a body (thus
violating the RFC2616), curl won't wait to read the response but just stop
reading and return back. If a second request (let's assume a GET) is then
immediately made to the same server again, the connection will be re-used
fine of course, and the second request will be sent off but when the
response is to get read, the previous response-body is what curl will read
and havoc is what happens.
More details on this is found in this libcurl mailing list thread:
http://curl.haxx.se/mail/lib-2002-08/0000.html

View File

@ -18,34 +18,36 @@ accompany your license with an exception[2]. This particular problem was
addressed when the Modified BSD license was created, which does not have the
announcement clause that collides with GPL.
libcurl https://curl.haxx.se/docs/copyright.html
libcurl http://curl.haxx.se/docs/copyright.html
Uses an MIT (or Modified BSD)-style license that is as liberal as
possible.
possible. Some of the source files that deal with KRB4 have Original
BSD-style announce-clause licenses. You may not distribute binaries
with krb4-enabled libcurl that also link with GPL-licensed code!
OpenSSL https://www.openssl.org/source/license.html
OpenSSL http://www.openssl.org/source/license.html
(May be used for SSL/TLS support) Uses an Original BSD-style license
with an announcement clause that makes it "incompatible" with GPL. You
are not allowed to ship binaries that link with OpenSSL that includes
GPL code (unless that specific GPL code includes an exception for
OpenSSL - a habit that is growing more and more common). If OpenSSL's
licensing is a problem for you, consider using another TLS library.
licensing is a problem for you, consider using GnuTLS or yassl
instead.
GnuTLS http://www.gnutls.org/
(May be used for SSL/TLS support) Uses the LGPL[3] license. If this is
a problem for you, consider using another TLS library. Also note that
a problem for you, consider using OpenSSL instead. Also note that
GnuTLS itself depends on and uses other libs (libgcrypt and
libgpg-error) and they too are LGPL- or GPL-licensed.
WolfSSL https://www.wolfssl.com/
yassl http://www.yassl.com/
(May be used for SSL/TLS support) Uses the GPL[1] license or a
proprietary license. If this is a problem for you, consider using
another TLS library.
(May be used for SSL/TLS support) Uses the GPL[1] license. If this is
a problem for you, consider using OpenSSL or GnuTLS instead.
NSS https://developer.mozilla.org/en-US/docs/Mozilla/Projects/NSS
NSS http://www.mozilla.org/projects/security/pki/nss/
(May be used for SSL/TLS support) Is covered by the MPL[4] license,
the GPL[1] license and the LGPL[3] license. You may choose to license
@ -57,49 +59,47 @@ axTLS http://axtls.sourceforge.net/
(May be used for SSL/TLS support) Uses a Modified BSD-style license.
mbedTLS https://tls.mbed.org/
(May be used for SSL/TLS support) Uses the GPL[1] license or a
proprietary license. If this is a problem for you, consider using
another TLS library.
BoringSSL https://boringssl.googlesource.com/
(May be used for SSL/TLS support) As an OpenSSL fork, it has the same
license as that.
libressl http://www.libressl.org/
(May be used for SSL/TLS support) As an OpenSSL fork, it has the same
license as that.
c-ares https://daniel.haxx.se/projects/c-ares/license.html
c-ares http://daniel.haxx.se/projects/c-ares/license.html
(Used for asynchronous name resolves) Uses an MIT license that is very
liberal and imposes no restrictions on any other library or part you
may link with.
zlib http://www.zlib.net/zlib_license.html
zlib http://www.gzip.org/zlib/zlib_license.html
(Used for compressed Transfer-Encoding support) Uses an MIT-style
license that shouldn't collide with any other library.
krb4
While nothing in particular says that a Kerberos4 library must use any
particular license, the one I've tried and used successfully so far
(kth-krb4) is partly Original BSD-licensed with the announcement
clause. Some of the code in libcurl that is written to deal with
Kerberos4 is Modified BSD-licensed.
MIT Kerberos http://web.mit.edu/kerberos/www/dist/
(May be used for GSS support) MIT licensed, that shouldn't collide
with any other parts.
Heimdal http://www.h5l.org
Heimdal http://www.pdc.kth.se/heimdal/
(May be used for GSS support) Heimdal is Original BSD licensed with
the announcement clause.
GNU GSS https://www.gnu.org/software/gss/
GNU GSS http://www.gnu.org/software/gss/
(May be used for GSS support) GNU GSS is GPL licensed. Note that you
may not distribute binary curl packages that uses this if you build
curl to also link and use any Original BSD licensed libraries!
fbopenssl
(Used for SPNEGO support) Unclear license. Based on its name, I assume
that it uses the OpenSSL license and thus shares the same issues as
described for OpenSSL above.
libidn http://josefsson.org/libidn/
(Used for IDNA support) Uses the GNU Lesser General Public
@ -121,10 +121,10 @@ libssh2 http://www.libssh2.org/
(Used for scp and sftp support) libssh2 uses a Modified BSD-style
license.
[1] = GPL - GNU General Public License: https://www.gnu.org/licenses/gpl.html
[2] = https://www.gnu.org/licenses/gpl-faq.html#GPLIncompatibleLibs details on
[1] = GPL - GNU General Public License: http://www.gnu.org/licenses/gpl.html
[2] = http://www.fsf.org/licenses/gpl-faq.html#GPLIncompatibleLibs details on
how to write such an exception to the GPL
[3] = LGPL - GNU Lesser General Public License:
https://www.gnu.org/licenses/lgpl.html
http://www.gnu.org/licenses/lgpl.html
[4] = MPL - Mozilla Public License:
https://www.mozilla.org/MPL/
http://www.mozilla.org/MPL/

View File

@ -13,8 +13,6 @@ MAIL ETIQUETTE
1.4 Subscription Required
1.5 Moderation of new posters
1.6 Handling trolls and spam
1.7 How to unsubscribe
1.8 I posted, now what?
2. Sending mail
2.1 Reply or New Mail
@ -33,7 +31,7 @@ MAIL ETIQUETTE
1.1 Mailing Lists
The mailing lists we have are all listed and described at
https://curl.haxx.se/mail/
http://curl.haxx.se/mail/
Each mailing list is targeted to a specific set of users and subjects,
please use the one or the ones that suit you the most.
@ -60,7 +58,7 @@ MAIL ETIQUETTE
no way to read the reply, but to ask the one person the question. The one
person consequently gets overloaded with mail.
If you really want to contact an individual and perhaps pay for his or her
If you really want to contact an individual and perhaps pay for his or her's
services, by all means go ahead, but if it's just another curl question,
take it to a suitable list instead.
@ -93,7 +91,7 @@ MAIL ETIQUETTE
1.6 Handling trolls and spam
Despite our good intentions and hard work to keep spam off the lists and to
Despite our good intensions and hard work to keep spam off the lists and to
maintain a friendly and positive atmosphere, there will be times when spam
and or trolls get through.
@ -106,62 +104,12 @@ MAIL ETIQUETTE
No matter what, we NEVER EVER respond to trolls or spammers on the list. If
you believe the list admin should do something particular, contact him/her
off-list. The subject will be taken care of as good as possible to prevent
repeated offenses, but responding on the list to such messages never lead to
repeated offences, but responding on the list to such messages never lead to
anything good and only puts the light even more on the offender: which was
the entire purpose of it getting to the list in the first place.
Don't feed the trolls!
1.7 How to unsubscribe
You unsubscribe the same way you subscribed in the first place. You go to
the page for the particular mailing list you're subscribed to and you enter
your email address and password and press the unsubscribe button.
Also, this information is included in the headers of every mail that is sent
out to all curl related mailing lists and there's a footer in each mail that
links to the "admin" page on which you can unsubscribe and change other
options.
You NEVER EVER email the mailing list requesting someone else to get you off
the list.
1.8 I posted, now what?
If you aren't subscribed with the exact same email address that you used to
send the email, your post will just be silently discarded.
If you posted for the first time to the mailing list, you first need to wait
for an administrator to allow your email to go through. This normally
happens very quickly but in case we're asleep, you may have to wait a few
hours.
Once your email goes through it is sent out to several hundred or even
thousand recipients. Your email may cover an area that not that many people
know about or are interested in. Or possibly the person who knows about it
is on vacation or under a very heavy work load right now. You have to wait
for a response and you must not expect to get a response at all, but
hopefully you get an answer within a couple of days.
You do yourself and all of us a service when you include as many details as
possible already in your first email. Mention your operating system and
environment. Tell us which curl version you're using and tell us what you
did, what happened and what you expected would happen. Preferably, show us
what you did in details enough to allow others to help point out the problem
or repeat the same steps in their places.
Failing to include details will only delay responses and make people respond
and ask for the details and you have to send a follow-up email that includes
them.
Expect the responses to primarily help YOU debug the issue, or ask you
questions that can lead you or others towards a solution or explanation to
whatever you experience.
If you are a repeat offender to the guidelines outlined in this document,
chances are that people will ignore you at will and your chances to get
responses will greatly diminish.
2. Sending mail
@ -207,8 +155,8 @@ MAIL ETIQUETTE
Q: What is the most annoying thing in e-mail?
Apart from the screwed up read order (especially when mixed together in a
thread when someone responds using the mandated bottom-posting style), it
also makes it impossible to quote only parts of the original mail.
thread when some responds doing the mandaded bottom-posting style), it also
makes it impossible to quote only parts of the original mail.
When you reply to a mail. You let the mail client insert the previous mail
quoted. Then you put the cursor on the first line of the mail and you move
@ -230,7 +178,7 @@ MAIL ETIQUETTE
Quote as little as possible. Just enough to provide the context you cannot
leave out. A lengthy description can be found here:
https://www.netmeister.org/news/learn2quote.html
http://www.netmeister.org/news/learn2quote.html
2.7 Digest

View File

@ -3,7 +3,7 @@ LATEST VERSION
You always find news about what's going on as well as the latest versions
from the curl web pages, located at:
https://curl.haxx.se
http://curl.haxx.se
SIMPLE USAGE
@ -19,7 +19,7 @@ SIMPLE USAGE
curl http://www.weirdserver.com:8000/
Get a directory listing of an FTP site:
Get a list of a directory of an FTP site:
curl ftp://cool.haxx.se/
@ -41,31 +41,20 @@ SIMPLE USAGE
Get a file from an SSH server using SFTP:
curl -u username sftp://example.com/etc/issue
curl -u username sftp://shell.example.com/etc/issue
Get a file from an SSH server using SCP using a private key
(not password-protected) to authenticate:
Get a file from an SSH server using SCP using a private key to authenticate:
curl -u username: --key ~/.ssh/id_rsa \
scp://example.com/~/file.txt
Get a file from an SSH server using SCP using a private key
(password-protected) to authenticate:
curl -u username: --key ~/.ssh/id_rsa --pass private_key_password \
scp://example.com/~/file.txt
curl -u username: --key ~/.ssh/id_dsa --pubkey ~/.ssh/id_dsa.pub \
scp://shell.example.com/~/personal.txt
Get the main page from an IPv6 web server:
curl "http://[2001:1890:1112:1::20]/"
Get a file from an SMB server:
curl -u "domain\username:passwd" smb://server.example.com/share/file.txt
curl -g "http://[2001:1890:1112:1::20]/"
DOWNLOAD TO A FILE
Get a web page and store in a local file with a specific name:
Get a web page and store in a local file:
curl -o thatpage.html http://www.netscape.com/
@ -102,13 +91,10 @@ USING PASSWORDS
SFTP / SCP
This is similar to FTP, but you can use the --key option to specify a
private key to use instead of a password. Note that the private key may
itself be protected by a password that is unrelated to the login password
of the remote system; this password is specified using the --pass option.
Typically, curl will automatically extract the public key from the private
key file, but in cases where curl does not have the proper library support,
a matching public key file must be specified using the --pubkey option.
This is similar to FTP, but you can specify a private key to use instead of
a password. Note that the private key may itself be protected by a password
that is unrelated to the login password of the remote system. If you
provide a private key file you must also provide a public key file.
HTTP
@ -122,15 +108,14 @@ USING PASSWORDS
curl -u name:passwd http://machine.domain/full/path/to/file
HTTP offers many different methods of authentication and curl supports
several: Basic, Digest, NTLM and Negotiate (SPNEGO). Without telling which
method to use, curl defaults to Basic. You can also ask curl to pick the
most secure ones out of the ones that the server accepts for the given URL,
by using --anyauth.
several: Basic, Digest, NTLM and Negotiate. Without telling which method to
use, curl defaults to Basic. You can also ask curl to pick the most secure
ones out of the ones that the server accepts for the given URL, by using
--anyauth.
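For example, to let curl pick the strongest of the methods the server
offers for that URL:
curl --anyauth -u name:passwd http://machine.domain/full/path/to/file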
NOTE! According to the URL specification, HTTP URLs can not contain a user
and password, so that style will not work when using curl via a proxy, even
though curl allows it at other times. When using a proxy, you _must_ use
the -u style for user and password.
NOTE! Since HTTP URLs don't support user and password, you can't use that
style when using Curl via a proxy. You _must_ use the -u style fetch
during such circumstances.
HTTPS
@ -148,7 +133,7 @@ PROXY
curl -x my-proxy:888 ftp://ftp.leachsite.com/README
Get a file from an HTTP server that requires user and password, using the
Get a file from a HTTP server that requires user and password, using the
same proxy as above:
curl -u user:passwd -x my-proxy:888 http://www.get.this/
@ -186,7 +171,7 @@ PROXY
RANGES
HTTP 1.1 introduced byte-ranges. Using this, a client can request
With HTTP 1.1 byte-ranges were introduced. Using this, a client can request
to get only one or more subparts of a specified document. Curl supports
this with the -r flag.
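For example, to fetch only the first 100 bytes of a document:
curl -r 0-99 http://www.get.this/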
@ -217,8 +202,8 @@ UPLOADING
curl -T uploadfile -u user:passwd ftp://ftp.upload.com/myfile
Upload a local file to the remote site, and use the local file name at the remote
site too:
Upload a local file to the remote site, and use the local file name remote
too:
curl -T uploadfile -u user:passwd ftp://ftp.upload.com/
@ -232,21 +217,16 @@ UPLOADING
curl --proxytunnel -x proxy:port -T localfile ftp.upload.com
SMB / SMBS
curl -T file.txt -u "domain\username:passwd"
smb://server.example.com/share/
HTTP
Upload all data on stdin to a specified HTTP site:
Upload all data on stdin to a specified http site:
curl -T - http://www.upload.com/myfile
Note that the HTTP server must have been configured to accept PUT before
Note that the http server must have been configured to accept PUT before
this can be done successfully.
For other ways to do HTTP data upload, see the POST section below.
For other ways to do http data upload, see the POST section below.
VERBOSE / DEBUG
@ -309,7 +289,7 @@ POST (HTTP)
The 'variable' names are the names set with "name=" in the <input> tags, and
the data is the contents you want to fill in for the inputs. The data *must*
be properly URL encoded. That means you replace space with + and that you
replace weird letters with %XX where XX is the hexadecimal representation of
write weird letters with %XX where XX is the hexadecimal representation of
the letter's ASCII code.
Example:
@ -348,7 +328,7 @@ POST (HTTP)
If the content-type is not specified, curl will try to guess from the file
extension (it only knows a few), or use the previously specified type (from
an earlier file if several files are specified in a list) or else it will
use the default type 'application/octet-stream'.
using the default type 'text/plain'.
Emulate a fill-in form with -F. Let's say you fill in three fields in a
form. One field is a file name which to post, one field is your name and one
@ -381,8 +361,8 @@ POST (HTTP)
REFERRER
An HTTP request has the option to include information about which address
referred it to the actual page. Curl allows you to specify the
A HTTP request has the option to include information about which address
that referred to actual page. Curl allows you to specify the
referrer to be used on the command line. It is especially useful to
fool or trick stupid servers or CGI scripts that rely on that information
being available or contain certain data.
@ -393,7 +373,7 @@ REFERRER
USER AGENT
An HTTP request has the option to include information about the browser
A HTTP request has the option to include information about the browser
that generated the request. Curl allows it to be specified on the command
line. It is especially useful to fool or trick stupid servers or CGI
scripts that only accept certain browsers.
@ -470,8 +450,8 @@ COOKIES
stored cookies which match the request as it follows the location. The
file "empty.txt" may be a nonexistent file.
To read and write cookies from a netscape cookie file, you can set both -b
and -c to use the same file:
Alas, to both read and write cookies from a netscape cookie file, you can
set both -b and -c to use the same file:
curl -b cookies.txt -c cookies.txt www.example.com
@ -633,21 +613,21 @@ SFTP and SCP and PATH NAMES
FTP and firewalls
The FTP protocol requires one of the involved parties to open a second
connection as soon as data is about to get transferred. There are two ways to
connection as soon as data is about to get transfered. There are two ways to
do this.
The default way for curl is to issue the PASV command which causes the
server to open another port and await another connection performed by the
client. This is good if the client is behind a firewall that doesn't allow
client. This is good if the client is behind a firewall that don't allow
incoming connections.
curl ftp.download.com
If the server, for example, is behind a firewall that doesn't allow connections
on ports other than 21 (or if it just doesn't support the PASV command), the
If the server for example, is behind a firewall that don't allow connections
on other ports than 21 (or if it just doesn't support the PASV command), the
other way to do it is to use the PORT command and instruct the server to
connect to the client on the given IP number and port (as parameters to the
PORT command).
connect to the client on the given (as parameters to the PORT command) IP
number and port.
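For example, passing a single dash to -P makes curl use the same IP address
for the data connection that is already in use for the control connection:
curl -P - ftp.download.com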
The -P flag to curl supports a few different options. Your machine may have
several IP-addresses and/or network interfaces and curl allows you to select
@ -705,8 +685,8 @@ HTTPS
If you neglect to specify the password on the command line, you will be
prompted for the correct password before any data can be received.
Many older SSL-servers have problems with SSLv3 or TLS, which newer versions
of OpenSSL etc use, therefore it is sometimes useful to specify what
Many older SSL-servers have problems with SSLv3 or TLS, that newer versions
of OpenSSL etc is using, therefore it is sometimes useful to specify what
SSL-version curl should use. Use -3, -2 or -1 to specify that exact SSL
version to use (for SSLv3, SSLv2 or TLSv1 respectively):
@ -715,13 +695,14 @@ HTTPS
Otherwise, curl will first attempt to use v3 and then v2.
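For example (the host name is only a placeholder), forcing TLSv1:
curl -1 https://www.secure-site.example/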
To use OpenSSL to convert your favourite browser's certificate into a PEM
formatted one that curl can use, do something like this:
formatted one that curl can use, do something like this (assuming netscape,
but IE is likely to work similarly):
In Netscape, you start with hitting the 'Security' menu button.
You start with hitting the 'security' menu button in netscape.
Select 'certificates->yours' and then pick a certificate in the list
Press the 'Export' button
Press the 'export' button
enter your PIN code for the certs
@ -732,21 +713,11 @@ HTTPS
# ./apps/openssl pkcs12 -in [file you saved] -clcerts -out [PEMfile]
In Firefox, select Options, then Advanced, then the Encryption tab,
View Certificates. This opens the Certificate Manager, where you can
Export. Be sure to select PEM for the Save as type.
In Internet Explorer, select Internet Options, then the Content tab, then
Certificates. Then you can Export, and depending on the format you may
need to convert to PEM.
In Chrome, select Settings, then Show Advanced Settings. Under HTTPS/SSL
select Manage Certificates.
RESUMING FILE TRANSFERS
To continue a file transfer where it was previously aborted, curl supports
resume on HTTP(S) downloads as well as FTP uploads and downloads.
resume on http(s) downloads as well as ftp uploads and downloads.
Continue downloading a document:
@ -760,7 +731,7 @@ RESUMING FILE TRANSFERS
curl -C - -o file http://www.server.com/
(*1) = This requires that the FTP server supports the non-standard command
(*1) = This requires that the ftp server supports the non-standard command
SIZE. If it doesn't, curl will say so.
(*2) = This requires that the web server supports at least HTTP/1.1. If it
@ -769,7 +740,7 @@ RESUMING FILE TRANSFERS
TIME CONDITIONS
HTTP allows a client to specify a time condition for the document it
requests. It is If-Modified-Since or If-Unmodified-Since. Curl allows you to
requests. It is If-Modified-Since or If-Unmodified-Since. Curl allow you to
specify them with the -z/--time-cond flag.
For example, you can easily make a download that only gets performed if the
@ -817,16 +788,16 @@ LDAP
and offer ldap:// support.
LDAP is a complex thing and writing an LDAP query is not an easy task. I do
advise you to dig up the syntax description for that elsewhere. Two places
advice you to dig up the syntax description for that elsewhere. Two places
that might suit you are:
Netscape's "Netscape Directory SDK 3.0 for C Programmer's Guide Chapter 10:
Working with LDAP URLs":
http://developer.netscape.com/docs/manuals/dirsdk/csdk30/url.htm
RFC 2255, "The LDAP URL Format" https://curl.haxx.se/rfc/rfc2255.txt
RFC 2255, "The LDAP URL Format" http://curl.haxx.se/rfc/rfc2255.txt
To show you an example, this is how I can get all people from my local LDAP
To show you an example, this is now I can get all people from my local LDAP
server that has a certain sub-domain in their email address:
curl -B "ldap://ldap.frontec.se/o=frontec??sub?mail=*sth.frontec.se"
@ -860,15 +831,15 @@ ENVIRONMENT VARIABLES
NETRC
Unix introduced the .netrc concept a long time ago. It is a way for a user
to specify name and password for commonly visited FTP sites in a file so
to specify name and password for commonly visited ftp sites in a file so
that you don't have to type them in each time you visit those sites. You
realize this is a big security risk if someone else gets hold of your
passwords, so therefore most unix programs won't read this file unless it is
only readable by yourself (curl doesn't care though).
Curl supports .netrc files if told to (using the -n/--netrc and
--netrc-optional options). This is not restricted to just FTP,
so curl can use it for all protocols where authentication is used.
Curl supports .netrc files if told so (using the -n/--netrc and
--netrc-optional options). This is not restricted to only ftp,
but curl can use it for all protocols where authentication is used.
A very simple .netrc file could look something like:
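Purely as an illustration (the host and credentials are made up), an entry
uses the machine/login/password keywords:
machine host.domain.example login myself password secret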
@ -889,7 +860,7 @@ KERBEROS FTP TRANSFER
Curl supports kerberos4 and kerberos5/GSSAPI for FTP transfers. You need
the kerberos package installed and used at curl build time for it to be
available.
used.
First, get the krb-ticket the normal way, like with the kinit/kauth tool.
Then use curl in a way similar to:
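For illustration only (host and credentials are placeholders), requesting
the 'private' protection level:
curl --krb private -u username:fakepwd ftp://krb.example.com/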
@ -924,7 +895,7 @@ TELNET
- NEW_ENV=<var,val> Sets an environment variable.
NOTE: The telnet protocol does not specify any way to login with a specified
NOTE: the telnet protocol does not specify any way to login with a specified
user and password so curl can't do that automatically. To do that, you need
to track when the login prompt is received and send the username and
password accordingly.
@ -943,7 +914,7 @@ PERSISTENT CONNECTIONS
Note that curl cannot use persistent connections for transfers that are used
in subsequence curl invokes. Try to stuff as many URLs as possible on the
same command line if they are using the same host, as that'll make the
transfers faster. If you use an HTTP proxy for file transfers, practically
transfers faster. If you use a http proxy for file transfers, practically
all transfers will be persistent.
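For example (the file names are just placeholders), these two documents are
fetched over a single re-used connection since they live on the same host:
curl http://www.example.com/1.html http://www.example.com/2.html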
MULTIPLE TRANSFERS WITH A SINGLE COMMAND LINE
@ -975,43 +946,21 @@ IPv6
When this style is used, the -g option must be given to stop curl from
interpreting the square brackets as special globbing characters. Link local
and site local addresses including a scope identifier, such as fe80::1234%1,
may also be used, but the scope portion must be numeric or match an existing
network interface on Linux and the percent character must be URL escaped. The
previous example in an SFTP URL might look like:
may also be used, but the scope portion must be numeric and the percent
character must be URL escaped. The previous example in an SFTP URL might
look like:
sftp://[fe80::1234%251]/
IPv6 addresses provided other than in URLs (e.g. to the --proxy, --interface
or --ftp-port options) should not be URL encoded.
METALINK
Curl supports Metalink (both version 3 and 4 (RFC 5854) are supported), a way
to list multiple URIs and hashes for a file. Curl will make use of the mirrors
listed within for failover if there are errors (such as the file or server not
being available). It will also verify the hash of the file after the download
completes. The Metalink file itself is downloaded and processed in memory and
not stored in the local file system.
Example to use a remote Metalink file:
curl --metalink http://www.example.com/example.metalink
To use a Metalink file in the local file system, use FILE protocol (file://):
curl --metalink file://example.metalink
Please note that if FILE protocol is disabled, there is no way to use a local
Metalink file at the time of this writing. Also note that if --metalink and
--include are used together, --include will be ignored. This is because including
headers in the response will break Metalink parser and if the headers are included
in the file described in Metalink file, hash check will fail.
MAILING LISTS
For your convenience, we have several open mailing lists to discuss curl,
its development and things relevant to this. Get all info at
https://curl.haxx.se/mail/. Some of the lists available are:
http://curl.haxx.se/mail/. Some of the lists available are:
curl-users

View File

@ -5,11 +5,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -23,9 +23,8 @@
AUTOMAKE_OPTIONS = foreign no-dependencies
man_MANS = curl.1 curl-config.1
noinst_man_MANS = mk-ca-bundle.1
GENHTMLPAGES = curl.html curl-config.html mk-ca-bundle.html
PDFPAGES = curl.pdf curl-config.pdf mk-ca-bundle.pdf
GENHTMLPAGES = curl.html curl-config.html
PDFPAGES = curl.pdf curl-config.pdf
HTMLPAGES = $(GENHTMLPAGES) index.html
@ -33,22 +32,21 @@ SUBDIRS = examples libcurl
CLEANFILES = $(GENHTMLPAGES) $(PDFPAGES)
EXTRA_DIST = MANUAL BUGS CONTRIBUTE FAQ FEATURES INTERNALS SSLCERTS \
README.win32 RESOURCES TODO TheArtOfHttpScripting THANKS VERSIONS \
KNOWN_BUGS BINDINGS $(man_MANS) $(HTMLPAGES) HISTORY INSTALL \
$(PDFPAGES) LICENSE-MIXING README.netware INSTALL.devcpp \
MAIL-ETIQUETTE HTTP-COOKIES SECURITY RELEASE-PROCEDURE SSL-PROBLEMS \
HTTP2.md ROADMAP.md CODE_OF_CONDUCT.md CODE_STYLE.md
EXTRA_DIST = MANUAL BUGS CONTRIBUTE FAQ FEATURES INTERNALS SSLCERTS \
README.win32 RESOURCES TODO TheArtOfHttpScripting THANKS VERSIONS \
KNOWN_BUGS BINDINGS $(man_MANS) $(HTMLPAGES) HISTORY INSTALL \
$(PDFPAGES) LICENSE-MIXING README.netware DISTRO-DILEMMA INSTALL.devcpp \
MAIL-ETIQUETTE
MAN2HTML= roffit < $< >$@
SUFFIXES = .1 .html .pdf
html: $(HTMLPAGES)
cd libcurl && make html
cd libcurl; make html
pdf: $(PDFPAGES)
cd libcurl && make pdf
cd libcurl; make pdf
.1.html:
$(MAN2HTML)

0
docs/README.cmake Normal file → Executable file
View File

View File

@ -10,7 +10,7 @@ README.netware
Curl has been successfully compiled with gcc / nlmconv on different flavours
of Linux as well as with the official Metrowerks CodeWarrior compiler.
While not being the main development target, a continuously growing share of
While not being the main development target, a continously growing share of
curl users are NetWare-based, especially also consuming the lib from PHP.
The unix-style man pages are tricky to read on windows, so therefore are all

View File

@ -1,87 +0,0 @@
curl release procedure - how to do a release
============================================
in the source code repo
-----------------------
- edit `RELEASE-NOTES` to be accurate
- update `docs/THANKS`
- make sure all relevant changes are committed on the master branch
- tag the git repo in this style: `git tag -a curl-7_34_0`. -a annotates the
tag and we use underscores instead of dots in the version number.
- run "./maketgz 7.34.0" to build the release tarballs. It is important that
you run this on a machine with the correct set of autotools etc installed
as this is what then will be shipped and used by most users on *nix like
systems.
- push the git commits and the new tag
- gpg sign the 4 tarballs as maketgz suggests
- upload the 8 resulting files to the primary download directory
in the curl-www repo
--------------------
- edit `Makefile` (version number and date),
- edit `_newslog.html` (announce the new release) and
- edit `_changes.html` (insert changes+bugfixes from RELEASE-NOTES)
- commit all local changes
- tag the repo with the same tag as used for the source repo
- make sure all relevant changes are committed and pushed on the master branch
(the web site then updates its contents automatically)
inform
------
- send an email to curl-users, curl-announce and curl-library. Insert the
RELEASE-NOTES into the mail.
celebrate
---------
- suitable beverage intake is encouraged for the festivities
curl release scheduling
=======================
Basics
------
We do releases every 8 weeks on Wednesdays. If critical problems arise, we can
insert releases outside of the schedule or we can move the release date - but
this is very rare.
Each 8 week release cycle is split in two 4-week periods.
- During the first 4 weeks after a release, we allow new features and changes
to curl and libcurl. If we accept any such changes, we bump the minor number
used for the next release.
- During the second 4-week period we do not merge any features or changes, we
then only focus on fixing bugs and polishing things to make a solid coming
release.
Coming dates
------------
Based on the description above, here are some planned release dates (at the
time of this writing):
- October 7, 2015 (version 7.45.0)
- December 2, 2015
- January 27, 2016
- March 23, 2016
- May 18, 2016
- July 13, 2016
- September 7, 2016

View File

@ -36,7 +36,7 @@ This document lists documents and standards used by curl.
RFC 2109 - HTTP State Management Mechanism (cookie stuff)
- Also, read Netscape's specification at
https://curl.haxx.se/rfc/cookie_spec.html
http://curl.haxx.se/rfc/cookie_spec.html
RFC 2183 - The Content-Disposition Header Field

View File

@ -1,133 +0,0 @@
curl the next few years - perhaps
=================================
Roadmap of things Daniel Stenberg and Steve Holme want to work on next. It is
intended to serve as a guideline for others for information, feedback and
possible participation.
HTTP/2
------
- test suite
Base this on existing nghttp2 server to start with to make functional
tests. Later on we can adopt that code or work with nghttp2 to provide ways
to have the http2 server respond with broken responses to make sure we deal
with that nicely as well.
To decide: if we need to bundle parts of the nghttp2 stuff that probably
won't be shipped by many distros.
- provide option for HTTP/2 "prior knowledge" over clear text
As it would avoid the roundtrip-heavy Upgrade: procedures when you _know_
it speaks HTTP/2.
HTTP cookies
------------
Two cookie drafts have been adopted by the httpwg in IETF and we should
support them as the popular browsers will as well:
[Deprecate modification of 'secure' cookies from non-secure
origins](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-alone-00)
[Cookie Prefixes](https://tools.ietf.org/html/draft-ietf-httpbis-cookie-prefixes-00)
[Firefox bug report about secure cookies](https://bugzilla.mozilla.org/show_bug.cgi?id=976073)
SRV records
-----------
How to find services for specific domains/hosts.
HTTPS to proxy
--------------
To avoid network traffic to/from the proxy getting snooped on. There's a git
branch in the public git repository for this that we need to make sure works
for all TLS backends and then merge!
curl_formadd()
--------------
make sure there's an easy handle passed in to `curl_formadd()`,
`curl_formget()` and `curl_formfree()` by adding replacement functions and
deprecating the old ones to allow custom mallocs and more
third-party SASL
----------------
add support for third-party SASL libraries such as Cyrus SASL - may need to
move existing native and SSPI based authentication into vsasl folder after
reworking HTTP and SASL code
SASL authentication in LDAP
---------------------------
...
Simplify the SMTP email
-----------------------
Simplify the SMTP email interface so that programmers don't have to
construct the body of an email that contains all the headers, alternative
content, images and attachments - maintain raw interface so that
programmers that want to do this can
email capabilities
------------------
Allow the email protocols to return the capabilities before
authenticating. This will allow an application to decide on the best
authentication mechanism
Win32 pthreads
--------------
Allow Windows threading model to be replaced by Win32 pthreads port
dynamic buffer size
-------------------
Implement a dynamic buffer size to allow SFTP to use much larger buffers and
possibly allow the size to be customizable by applications. Use less memory
when handles are not in use?
New stuff - curl
----------------
1. Embed a language interpreter (lua?). For that middle ground where curl
isn't enough and a libcurl binding feels “too much”. Build-time conditional
of course.
2. Simplify the SMTP command line so that the headers and multi-part content
don't have to be constructed before calling curl
Improve
-------
1. build for windows (considered hard by many users)
2. curl -h output (considered overwhelming to users)
3. we have > 170 command line options, is there a way to redo things to
simplify or improve the situation as we are likely to keep adding
features/options in the future too
4. docs (considered "bad" by users but how do we make it better?)
- split up curl.1
5. authentication framework (consider merging HTTP and SASL authentication to
give one API for protocols to call)
6. Perform some of the clean up from the TODO document, removing old
definitions and such like that are currently earmarked to be removed years
ago
Remove
------
1. makefile.vc files as there is no point in maintaining two sets of Windows
makefiles. Note: These are currently being used by the Windows autobuilds

View File

@ -1,107 +0,0 @@
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
curl security for developers
============================
This document is intended to provide guidance to curl developers on how
security vulnerabilities should be handled.
Publishing Information
----------------------
All known and public curl or libcurl related vulnerabilities are listed on
[the curl web site security page](https://curl.haxx.se/docs/security.html).
Security vulnerabilities should not be entered in the project's public bug
tracker unless the necessary configuration is in place to limit access to the
issue to only the reporter and the project's security team.
Vulnerability Handling
----------------------
The typical process for handling a new security vulnerability is as follows.
No information should be made public about a vulnerability until it is
formally announced at the end of this process. That means, for example that a
bug tracker entry must NOT be created to track the issue since that will make
the issue public and it should not be discussed on any of the project's public
mailing lists. Also messages associated with any commits should not make
any reference to the security nature of the commit if done prior to the public
announcement.
- The person discovering the issue, the reporter, reports the vulnerability
privately to `curl-security@haxx.se`. That's an email alias that reaches a
handful of selected and trusted people.
- Messages that do not relate to the reporting or managing of an undisclosed
security vulnerability in curl or libcurl are ignored and no further action
is required.
- A person in the security team sends an e-mail to the original reporter to
acknowledge the report.
- The security team investigates the report and either rejects it or accepts
it.
- If the report is rejected, the team writes to the reporter to explain why.
- If the report is accepted, the team writes to the reporter to let him/her
know it is accepted and that they are working on a fix.
- The security team discusses the problem, works out a fix, considers the
impact of the problem and suggests a release schedule. This discussion
should involve the reporter as much as possible.
- The release of the information should be "as soon as possible" and is most
often synced with an upcoming release that contains the fix. If the
reporter, or anyone else, thinks the next planned release is too far away
then a separate earlier release for security reasons should be considered.
- Write a security advisory draft about the problem that explains what the
problem is, its impact, which versions it affects, solutions or
workarounds, when the release is out and make sure to credit all
contributors properly.
- Request a CVE number from distros@openwall[1] when also informing and
preparing them for the upcoming public security vulnerability announcement -
attach the advisory draft for information. Note that 'distros' won't accept
an embargo longer than 19 days.
- Update the "security advisory" with the CVE number.
- The security team commits the fix in a private branch. The commit message
should ideally contain the CVE number. This fix is usually also distributed
to the 'distros' mailing list to allow them to use the fix prior to the
public announcement.
- At the day of the next release, the private branch is merged into the master
branch and pushed. Once pushed, the information is accessible to the public
and the actual release should follow suit immediately afterwards.
- The project team creates a release that includes the fix.
- The project team announces the release and the vulnerability to the world in
the same manner we always announce releases. It gets sent to the
curl-announce, curl-library and curl-users mailing lists.
- The security web page on the web site should get the new vulnerability
mentioned.
[1] = http://oss-security.openwall.org/wiki/mailing-lists/distros
CURL-SECURITY (at haxx dot se)
------------------------------
Who is on this list? There are a couple of criteria you must meet, and then we
might ask you to join the list or you can ask to join it. It really isn't very
formal. We basically only require that you have a long-term presence in the
curl project and you have shown an understanding of the project and its way
of working. You must've been around for a good while and you should have no
plans on vanishing in the near future.
We do not make the list of participants public mostly because it tends to vary
somewhat over time and a list somewhere will only risk getting outdated.

View File

@ -1,87 +0,0 @@
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
SSL problems
First, let's establish that we often refer to TLS and SSL interchangeably as
SSL here. The current protocol is called TLS, it was called SSL a long time
ago.
There are several known reasons why a connection that involves SSL might
fail. This is a document that attempts to detail the most common ones and
how to mitigate them.
CA certs
CA certs are used to digitally verify the server's certificate. You need a
"ca bundle" for this. See lots of more details on this in the SSLCERTS
document.
CA bundle missing intermediate certificates
When using said CA bundle to verify a server cert, you will experience
problems if your CA cert does not have the certificates for the
intermediates in the whole trust chain.
Protocol version
Some broken servers fail to support the protocol negotiation properly that
SSL servers are supposed to handle. This may cause the connection to fail
completely. Sometimes you may need to explicitly select a SSL version to use
when connecting to make the connection succeed.
An additional complication can be that modern SSL libraries sometimes are
built with support for older SSL and TLS versions disabled!
All versions of SSL are considered insecure and should be avoided. Use TLS.
Ciphers
Clients give servers a list of ciphers to select from. If the list doesn't
include any ciphers the server wants/can use, the connection handshake
fails.
curl has recently disabled the use of a whole bunch of seriously insecure
ciphers from its default set (slightly depending on SSL backend in use).
You may have to explicitly provide an alternative list of ciphers for curl
to use to allow the server to use a WEAK cipher for you.
Note that these weak ciphers are identified as flawed. For example, this
includes symmetric ciphers with less than 128 bit keys and RC4.
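A hedged example (the host is a placeholder and the cipher name follows
OpenSSL's naming) that explicitly re-enables such a weak cipher:
curl --ciphers RC4-SHA https://legacy.example.com/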
WinSSL in Windows XP is not able to connect to servers that no longer
support the legacy handshakes and algorithms used by those versions, so we
advise against building curl to use WinSSL on really old Windows versions.
References:
https://tools.ietf.org/html/draft-popov-tls-prohibiting-rc4-01
Allow BEAST
BEAST is the name of a TLS 1.0 attack that surfaced 2011. When adding means
to mitigate this attack, it turned out that some broken servers out there in
the wild didn't work properly with the BEAST mitigation in place.
To make such broken servers work, the --ssl-allow-beast option was
introduced. Exactly as it sounds, it re-introduces the BEAST vulnerability
but on the other hand it allows curl to connect to that kind of strange
servers.
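For example (placeholder host):
curl --ssl-allow-beast https://broken.example.com/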
Disabling certificate revocation checks
Some SSL backends may do certificate revocation checks (CRL, OCSP, etc)
depending on the OS or build configuration. The --ssl-no-revoke option was
introduced in 7.44.0 to disable revocation checking but currently is only
supported for WinSSL (the native Windows SSL library), with an exception in
the case of Windows' Untrusted Publishers blacklist which it seems can't be
bypassed. This option may have broader support to accommodate other SSL
backends in the future.
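A minimal example (again with a placeholder host):
curl --ssl-no-revoke https://example.com/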
References:
https://curl.haxx.se/docs/ssl-compared.html

View File

@ -1,110 +1,71 @@
SSL Certificate Verification
============================
SSL is TLS
----------
SSL is the old name. It is called TLS these days.
Native SSL
----------
If libcurl was built with Schannel or Secure Transport support (the native SSL
libraries included in Windows and Mac OS X), then this does not apply to
you. Scroll down for details on how the OS-native engines handle SSL
certificates. If you're not sure, then run "curl -V" and read the results. If
the version string says "WinSSL" in it, then it was built with Schannel
support.
It is about trust
-----------------
This system is about trust. In your local CA certificate store you have certs
from *trusted* Certificate Authorities that you then can use to verify that the
server certificates you see are valid. They're signed by one of the CAs you
trust.
Which CAs do you trust? You can decide to trust the same set of companies your
operating system trusts, or the set one of the known browsers trust. That's
basically trust via someone else you trust. You should just be aware that
modern operating systems and browsers are set up to trust *hundreds* of
companies and in recent years several such CAs have been found untrustworthy.
Certificate Verification
------------------------
Peer SSL Certificate Verification
=================================
libcurl performs peer SSL certificate verification by default. This is done
by using a CA certificate store that the SSL library can use to make sure the
peer's server certificate is valid.
by using CA cert bundle that the SSL library can use to make sure the peer's
server certificate is valid.
If you communicate with HTTPS, FTPS or other TLS-using servers using
certificates that are signed by CAs present in the store, you can be sure
that the remote server really is the one it claims to be.
If you communicate with HTTPS or FTPS servers using certificates that are
signed by CAs present in the bundle, you can be sure that the remote server
really is the one it claims to be.
Until 7.18.0, curl bundled a severely outdated ca bundle file that was
installed by default. These days, the curl archives include no ca certs at
all. You need to get them elsewhere. See below for example.
If the remote server uses a self-signed certificate, if you don't install a CA
cert store, if the server uses a certificate signed by a CA that isn't
included in the store you use or if the remote host is an impostor
cert bundle, if the server uses a certificate signed by a CA that isn't
included in the bundle you use or if the remote host is an impostor
impersonating your favorite site, and you want to transfer files from this
server, do one of the following:
1. Tell libcurl to *not* verify the peer. With libcurl you disable this with
`curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, FALSE);`
curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, FALSE);
With the curl command line tool, you disable this with -k/--insecure.
2. Get a CA certificate that can verify the remote server and use the proper
option to point out this CA cert for verification when connecting. For
libcurl hackers: `curl_easy_setopt(curl, CURLOPT_CAPATH, capath);`
libcurl hackers: curl_easy_setopt(curl, CURLOPT_CAPATH, capath);
With the curl command line tool: --cacert [file]
3. Add the CA cert for your server to the existing default CA certificate
store. The default CA certificate store can be changed at compile time with the
following configure options:
3. Add the CA cert for your server to the existing default CA cert bundle.
The default path of the CA bundle used can be changed by running configure
with the --with-ca-bundle option pointing out the path of your choice.
--with-ca-bundle=FILE: use the specified file as CA certificate store. CA
certificates need to be concatenated in PEM format into this file.
--with-ca-path=PATH: use the specified path as CA certificate store. CA
certificates need to be stored as individual PEM files in this directory.
You may need to run c_rehash after adding files there.
If neither of the two options is specified, configure will try to auto-detect
a setting. It's also possible to explicitly not hardcode any default store
but rely on the built in default the crypto library may provide instead.
You can achieve that by passing both --without-ca-bundle and
--without-ca-path to the configure script.
To do this, you need to get the CA cert for your server in PEM format and
then append that to your CA cert bundle.
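As an illustration (the bundle path is only an assumption; point it at
whatever file your system actually provides):
./configure --with-ca-bundle=/etc/ssl/certs/ca-certificates.crt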
If you use Internet Explorer, this is one way to extract the CA cert
for a particular server:
- View the certificate by double-clicking the padlock
- Find out where the CA certificate is kept (Certificate>
o View the certificate by double-clicking the padlock
o Find out where the CA certificate is kept (Certificate>
Authority Information Access>URL)
- Get a copy of the crt file using curl
- Convert it from crt to PEM using the openssl tool:
o Get a copy of the crt file using curl
o Convert it from crt to PEM using the openssl tool:
openssl x509 -inform DES -in yourdownloaded.crt \
-out outcert.pem -text
- Add the 'outcert.pem' to the CA certificate store or use it stand-alone
o Append the 'outcert.pem' to the CA cert bundle or use it stand-alone
as described below.
If you use the 'openssl' tool, this is one way to extract the CA cert
for a particular server:
- `openssl s_client -connect xxxxx.com:443 |tee logfile`
- type "QUIT", followed by the "ENTER" key
- The certificate will have "BEGIN CERTIFICATE" and "END CERTIFICATE"
o openssl s_client -connect xxxxx.com:443 |tee logfile
o type "QUIT", followed by the "ENTER" key
o The certificate will have "BEGIN CERTIFICATE" and "END CERTIFICATE"
markers.
- If you want to see the data in the certificate, you can do: "openssl
o If you want to see the data in the certificate, you can do: "openssl
x509 -inform PEM -in certfile -text -out certdata" where certfile is
the cert you extracted from logfile. Look in certdata.
- If you want to trust the certificate, you can add it to your CA
certificate store or use it stand-alone as described. Just remember that
the security is no better than the way you obtained the certificate.
o If you want to trust the certificate, you can append it to your
cert_bundle or use it stand-alone as described. Just remember that the
security is no better than the way you obtained the certificate.
4. If you're using the curl command line tool, you can specify your own CA
cert path by setting the environment variable `CURL_CA_BUNDLE` to the path
cert path by setting the environment variable CURL_CA_BUNDLE to the path
of your choice.
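For example, reusing the outcert.pem file extracted above (sh-style syntax,
placeholder host):
CURL_CA_BUNDLE=outcert.pem curl https://xxxxx.com/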
If you're using the curl command line tool on Windows, curl will search
@ -119,45 +80,37 @@ server, do one of the following:
5. Get a better/different/newer CA cert bundle! One option is to extract the
one a recent Firefox browser uses by running 'make ca-bundle' in the curl
build tree root, or possibly download a version that was generated this
way for you: [CA Extract](https://curl.haxx.se/docs/caextract.html)
way for you:
http://curl.haxx.se/docs/caextract.html
Neglecting to use one of the above methods when dealing with a server using a
certificate that isn't signed by one of the certificates in the installed CA
certificate store, will cause SSL to report an error ("certificate verify
failed") during the handshake and SSL will then refuse further communication
with that server.
cert bundle, will cause SSL to report an error ("certificate verify failed")
during the handshake and SSL will then refuse further communication with that
server.
Certificate Verification with NSS
---------------------------------
Peer SSL Certificate Verification with NSS
==========================================
If libcurl was built with NSS support, then depending on the OS distribution,
it is probably required to take some additional steps to use the system-wide
CA cert db. RedHat ships with an additional module, libnsspem.so, which
enables NSS to read the OpenSSL PEM CA bundle. On openSUSE you can install
p11-kit-nss-trust which makes NSS use the system wide CA certificate store. NSS
also has a new [database format](https://wiki.mozilla.org/NSS_Shared_DB).
If libcurl is build with NSS support then depending on the OS distribution it
is probably required to take some additional steps to use the system-wide CA
cert db. RedHat ships with an additional module libnsspem.so which enables NSS
to read the OpenSSL PEM CA bundle. With OpenSuSE this lib is missing, and NSS
can only work with its own internal formats. Also NSS got a new database
format:
https://wiki.mozilla.org/NSS_Shared_DB
Starting with version 7.19.7 libcurl will check for the NSS version it runs,
and add automatically the 'sql:' prefix to the certdb directory (either the
hardcoded default /etc/pki/nssdb or the directory configured with SSL_DIR
environment variable) if a version 3.12.0 or later is detected.
To check which certdb format your distribution provides examine the default
certdb location /etc/pki/nssdb; the new certdb format can be identified by
the filenames cert9.db, key4.db, pkcs11.txt; filenames of older versions are
cert8.db, key3.db, modsec.db.
Usually these cert databases are empty; but NSS also has built-in CAs which are
provided through a shared library libnssckbi.so; if you want to use these
built-in CAs then create a symlink to libnssckbi.so in /etc/pki/nssdb:
ln -s /usr/lib[64]/libnssckbi.so /etc/pki/nssdb/libnssckbi.so
Starting with version 7.19.7, libcurl automatically adds the 'sql:' prefix to
the certdb directory (either the hardcoded default /etc/pki/nssdb or the
directory configured with SSL_DIR environment variable). To check which certdb
format your distribution provides, examine the default certdb location:
/etc/pki/nssdb; the new certdb format can be identified by the filenames
cert9.db, key4.db, pkcs11.txt; filenames of older versions are cert8.db,
key3.db, secmod.db.
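A quick way to tell the two formats apart is to simply list that directory
(adjust the path if your distribution keeps the certdb elsewhere):

  ls /etc/pki/nssdb
  # cert9.db, key4.db and pkcs11.txt mean the new sql format
  # cert8.db, key3.db and secmod.db mean the old format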
Certificate Verification with Schannel and Secure Transport
-----------------------------------------------------------
If libcurl was built with Schannel (Microsoft's native TLS engine) or Secure
Transport (Apple's native TLS engine) support, then libcurl will still perform
peer certificate verification, but instead of using a CA cert bundle, it will
use the certificates that are built into the OS. These are the same
certificates that appear in the Internet Options control panel (under Windows)
or Keychain Access application (under OS X). Any custom security rules for
certificates will be honored.
Schannel will run CRL checks on certificates unless peer verification is
disabled. Secure Transport on iOS will run OCSP checks on certificates unless
peer verification is disabled. Secure Transport on OS X will run either OCSP
or CRL checks on certificates if those features are enabled, and this behavior
can be adjusted in the preferences of Keychain Access.

File diff suppressed because it is too large Load Diff

View File

@ -1,55 +0,0 @@
# This is a list of names we have recorded that already are thanked
# appropriately in THANKS. This list contains variations of their names and
# their "canonical" name. This file is used for scripting purposes to avoid
# duplicate entries and will not be included in release tarballs.
# When removing dupes that aren't identical names from THANKS, add a line
# here!
#
# Used-by: contributor.sh
s/Andres Garcia/Andrés García/
s/Chris Conroy/Christopher Conroy/
s/Francois Charlier/François Charlier/
s/Gokhan Sengun/Gökhan Şengün/
s/John Malmberg/John E. Malmberg/
s/Luca Alteas/Luca Altea/
s/Michal Gorny/Michał Górny/
s/Michal Górny/Michał Górny/
s/Moonesamy/S. Moonesamy/
s/Pete Su$/Peter Su/
s/Sam Listopad/Samuel Listopad/
s/Sebastien Willemijns/Sébastien Willemijns/
s/YAMADA Yasuharu/Yasuharu Yamada/
s/Karl M$/Karl Moerder/
s/Bjorn Stenberg/Björn Stenberg/
s/upstream tests 305 and 404//
s/Gaël PORTAY/Gaël Portay/
s/Romulo Ceccon/Romulo A. Ceccon/
s/Nach M. S$/Nach M. S./
s/Jay Satiro/Ray Satiro/
s/Richard J. Moore/Richard Moore/
s/Sergey Nikulov/Sergei Nikulov/
s/Petr Písař/Petr Pisar/
s/Nick Zitzmann (originally)/Nick Zitzmann/
s/product-security at Apple//
s/IT DOES NOT WORK//
s/Albert Chin/Albert Chin-A-Young/
s/Paras S\z/Paras Sethia/
s/Paras Sethiaethia/Paras Sethia/
s/Дмитрий Фалько/Dmitry Falko/
s/byte_bucket in the #curl IRC channel//
s/Michal Górny and Anthony G. Basile//
s/Alejandro Alvarez$/Alejandro Alvarez Ayllon/
s/Ant Bryan/Anthony Bryan/
s/Cédric Deltheil/Cédric Deltheil/
s/Christian Hagele/Christian Hägele/
s/douglas steinwand/Douglas Steinwand/
s/Frank Van Uffelen and Fabian Hiernaux//
s/Rodrigo Silva (MestreLion)/Rodrigo Silva/
s/tetetest tetetest//
s/Jiří Hruška/Jiri Hruska/
s/Viktor Szakats/Viktor Szakáts/
s/Jonathan Cardoso/Jonathan Cardoso Machado/
s/Linus Nielsen/Linus Nielsen Feltzing/
s/Todd Ouska$/Todd A Ouska/
s/Tim Ruehsen/Tim Rühsen/
s/Michael Koenig/Michael König/

727
docs/TODO

File diff suppressed because it is too large Load Diff

View File

@ -1,73 +1,16 @@
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
Online: http://curl.haxx.se/docs/httpscripting.html
Date: Jan 19, 2011
The Art Of Scripting HTTP Requests Using Curl
=============================================
The Art Of Scripting HTTP Requests Using Curl
This document will assume that you're familiar with HTML and general
networking.
1. HTTP Scripting
1.1 Background
1.2 The HTTP Protocol
1.3 See the Protocol
1.4 See the Timing
1.5 See the Response
2. URL
2.1 Spec
2.2 Host
2.3 Port number
2.4 User name and password
2.5 Path part
3. Fetch a page
3.1 GET
3.2 HEAD
3.3 Multiple URLs in a single command line
3.4 Multiple HTTP methods in a single command line
4. HTML forms
4.1 Forms explained
4.2 GET
4.3 POST
4.4 File Upload POST
4.5 Hidden Fields
4.6 Figure Out What A POST Looks Like
5. HTTP upload
5.1 PUT
6. HTTP Authentication
6.1 Basic Authentication
6.2 Other Authentication
6.3 Proxy Authentication
6.4 Hiding credentials
7. More HTTP Headers
7.1 Referer
7.2 User Agent
8. Redirects
8.1 Location header
8.2 Other redirects
9. Cookies
9.1 Cookie Basics
9.2 Cookie options
10. HTTPS
10.1 HTTPS is HTTP secure
10.2 Certificates
11. Custom Request Elements
11.1 Modify method and headers
11.2 More on changed methods
12. Web Login
12.1 Some login tricks
13. Debug
13.1 Some debug tricks
14. References
14.1 Standards
14.2 Sites
==============================================================================
1. HTTP Scripting
1.1 Background
This document assumes that you're familiar with HTML and general networking.
The possibility to write scripts is essential to make a good computer
system. Unix' capability to be extended by shell scripts and various tools to
run various automated commands and scripts is one reason why it has succeeded
so well.
The increasing amount of applications moving to the web has made "HTTP
Scripting" more frequently requested and wanted. To be able to automatically
@ -84,7 +27,7 @@ The Art Of Scripting HTTP Requests Using Curl
to glue everything together using some kind of script language or repeated
manual invokes.
1.2 The HTTP Protocol
1. The HTTP Protocol
HTTP is the protocol used to fetch data from web servers. It is a very simple
protocol that is built upon TCP/IP. The protocol also allows information to
@ -101,7 +44,7 @@ The Art Of Scripting HTTP Requests Using Curl
well), response headers and most often also a response body. The "body" part
is the plain data you requested, like the actual HTML or the image etc.
1.3 See the Protocol
1.1 See the Protocol
Using curl's option --verbose (-v as a short option) will display what kind
of commands curl sends to the server, as well as a few other informational
@ -116,157 +59,30 @@ The Art Of Scripting HTTP Requests Using Curl
curl --trace-ascii debugdump.txt http://www.example.com/
1.4 See the Timing
Many times you may wonder what exactly is taking all the time, or you just
want to know the amount of milliseconds between two points in a
transfer. For those, and other similar situations, the --trace-time option
is what you need. It'll prepend the time to each trace output line:
curl --trace-ascii d.txt --trace-time http://example.com/
1.5 See the Response
By default curl sends the response to stdout. You need to redirect it
somewhere to avoid that, most often that is done with -o or -O.
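For example, to write the response to a file you name yourself, or to a
local file named like the remote one:

  curl -o saved.html http://www.example.com/
  curl -O http://www.example.com/index.html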
2. URL
2.1 Spec
The Uniform Resource Locator format is how you specify the address of a
particular resource on the Internet. You know these, you've seen URLs like
https://curl.haxx.se or https://yourbank.com a million times. RFC 3986 is the
canonical spec. And yeah, the formal name is not URL, it is URI.
http://curl.haxx.se or https://yourbank.com a million times.
2.2 Host
The host name is usually resolved using DNS or your /etc/hosts file to an IP
address and that's what curl will communicate with. Alternatively you specify
the IP address directly in the URL instead of a name.
For development and other trying-out situations, you can point out a different
IP address for a host name than what would otherwise be used, by using curl's
--resolve option:
curl --resolve www.example.org:80:127.0.0.1 http://www.example.org/
2.3 Port number
Each protocol curl supports operates on a default port number, be it over TCP
or in some cases UDP. Normally you don't have to take that into
consideration, but at times you run test servers on other ports or
similar. Then you can specify the port number in the URL with a colon and a
number immediately following the host name. Like when doing HTTP to port
1234:
curl http://www.example.org:1234/
The port number you specify in the URL is the number that the server uses to
offer its services. Sometimes you may use a local proxy, and then you may
need to specify that proxy's port number separately, as that is what curl
connects to locally. Like when using a HTTP proxy on port 4321:
curl --proxy http://proxy.example.org:4321 http://remote.example.org/
2.4 User name and password
Some services are setup to require HTTP authentication and then you need to
provide name and password which then is transferred to the remote site in
various ways depending on the exact authentication protocol used.
You can opt to either insert the user and password in the URL or you can
provide them separately:
curl http://user:password@example.org/
or
curl -u user:password http://example.org/
You need to pay attention that this kind of HTTP authentication is not what
is usually done and requested by user-oriented web sites these days. They
tend to use forms and cookies instead.
2.5 Path part
The path part is just sent off to the server to request that it sends back
the associated response. The path is what is to the right side of the slash
that follows the host name and possibly port number.
3. Fetch a page
3.1 GET
3. GET a page
The simplest and most common request/operation made using HTTP is to get a
URL. The URL could itself refer to a web page, an image or a file. The client
issues a GET request to the server and receives the document it asked for.
If you issue the command line
curl https://curl.haxx.se
curl http://curl.haxx.se
you get a web page returned in your terminal window. The entire HTML document
that that URL holds.
All HTTP replies contain a set of response headers that are normally hidden,
use curl's --include (-i) option to display them as well as the rest of the
document.
document. You can also ask the remote server for ONLY the headers by using
the --head (-I) option (which will make curl issue a HEAD request).
3.2 HEAD
You can ask the remote server for ONLY the headers by using the --head (-I)
option which will make curl issue a HEAD request. In some special cases
servers deny the HEAD method while others still work, which is a particular
kind of annoyance.
The HEAD method is defined and made so that the server returns the headers
exactly the way it would do for a GET, but without a body. It means that you
may see a Content-Length: in the response headers, but there must not be an
actual body in the HEAD response.
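For example:

  curl --head http://www.example.com/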
3.3 Multiple URLs in a single command line
A single curl command line may involve one or many URLs. The most common case
is probably to just use one, but you can specify any amount of URLs. Yes
any. No limits. You'll then get requests repeated over and over for all the
given URLs.
Example, send two GETs:
curl http://url1.example.com http://url2.example.com
If you use --data to POST to the URL, using multiple URLs means that you send
that same POST to all the given URLs.
Example, send two POSTs:
curl --data name=curl http://url1.example.com http://url2.example.com
3.4 Multiple HTTP methods in a single command line
Sometimes you need to operate on several URLs in a single command line and do
different HTTP methods on each. For this, you'll enjoy the --next option. It
is basically a separator that separates a bunch of options from the next. All
the URLs before --next will get the same method and will get all the POST
data merged into one.
When curl reaches the --next on the command line, it'll sort of reset the
method and the POST data and allow a new set.
Perhaps this is best shown with a few examples. To send first a HEAD and then
a GET:
curl -I http://example.com --next http://example.com
To first send a POST and then a GET:
curl -d score=10 http://example.com/post.cgi --next http://example.com/results.html
4. HTML forms
4.1 Forms explained
4. Forms
Forms are the general way a web site can present a HTML page with fields for
the user to enter data in, and then press some kind of 'OK' or 'submit'
@ -279,7 +95,7 @@ The Art Of Scripting HTTP Requests Using Curl
Of course there has to be some kind of program in the server end to receive
the data you send. You cannot just invent something out of the air.
4.2 GET
4.1 GET
A GET-form uses the method GET, as specified in HTML like:
@ -305,7 +121,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl "http://www.hotmail.com/when/junk.cgi?birthyear=1905&press=OK"
4.3 POST
4.2 POST
The GET method makes all input field names get displayed in the URL field of
your browser. That's generally a good thing when you want to be able to
@ -342,11 +158,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl --data-urlencode "name=I am Daniel" http://www.example.com
If you repeat --data several times on the command line, curl will
concatenate all the given data pieces - and put a '&' symbol between each
data segment.
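As a small sketch (the field names and URL are only examples), these two
command lines send the same request body:

  curl --data birthyear=1905 --data press=OK http://www.example.com/when.cgi
  curl --data "birthyear=1905&press=OK" http://www.example.com/when.cgi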
4.4 File Upload POST
4.3 File Upload POST
Back in late 1995 they defined an additional way to post data over HTTP. It
is documented in the RFC 1867, why this method sometimes is referred to as
@ -367,7 +179,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl --form upload=@localfilename --form press=OK [URL]
4.5 Hidden Fields
4.4 Hidden Fields
A very common way for HTML based applications to pass state information
between pages is to add hidden fields to the forms. Hidden fields are
@ -388,7 +200,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl --data "birthyear=1905&press=OK&person=daniel" [URL]
4.6 Figure Out What A POST Looks Like
4.5 Figure Out What A POST Looks Like
When you're about to fill in a form and send it to a server by using curl instead
of a browser, you're of course very interested in sending a POST exactly the
@ -401,9 +213,7 @@ The Art Of Scripting HTTP Requests Using Curl
You will then clearly see the data get appended to the URL, separated with a
'?'-letter as GET forms are supposed to.
5. HTTP upload
5.1 PUT
5. PUT
The perhaps best way to upload data to a HTTP server is to use PUT. Then
again, this of course requires that someone put a program or script on the
@ -415,8 +225,6 @@ The Art Of Scripting HTTP Requests Using Curl
6. HTTP Authentication
6.1 Basic Authentication
HTTP Authentication is the ability to tell the server your username and
password so that it can verify that you're allowed to do the request you're
doing. The Basic authentication used in HTTP (which is the type curl uses by
@ -428,14 +236,10 @@ The Art Of Scripting HTTP Requests Using Curl
curl --user name:password http://www.example.com
6.2 Other Authentication
The site might require a different authentication method (check the headers
returned by the server), and then --ntlm, --digest, --negotiate or even
--anyauth might be options that suit you.
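For example, to let curl negotiate whatever method the server offers (the
user name and password are placeholders, of course):

  curl --anyauth --user name:password http://www.example.com/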
6.3 Proxy Authentication
Sometimes your HTTP access is only available through the use of a HTTP
proxy. This seems to be especially common at various companies. A HTTP proxy
may require its own user and password to allow the client to get through to
@ -449,8 +253,6 @@ The Art Of Scripting HTTP Requests Using Curl
If you use any one of these user+password options but leave out the password
part, curl will prompt for the password interactively.
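A sketch of both, with made-up host names:

  curl --proxy-user proxyuser:proxypassword \
    --proxy http://proxy.example.org:4321 http://www.example.com/

Leaving out ':proxypassword' makes curl ask for the proxy password instead.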
6.4 Hiding credentials
Do note that when a program is run, its parameters might be possible to see
when listing the running processes of the system. Thus, other users may be
able to watch your passwords if you pass them as plain command line
@ -460,9 +262,7 @@ The Art Of Scripting HTTP Requests Using Curl
many web sites will not use this concept when they provide logins etc. See
the Web Login chapter further below for more details on that.
7. More HTTP Headers
7.1 Referer
7. Referer
A HTTP request may include a 'referer' field (yes it is misspelled), which
can be used to tell from which URL the client got to this particular
@ -476,7 +276,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl --referer http://www.example.come http://www.example.com
7.2 User Agent
8. User Agent
Very similar to the referer field, all HTTP requests may set the User-Agent
field. It names what user agent (client) that is being used. Many
@ -498,9 +298,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl --user-agent "Mozilla/4.73 [en] (X11; U; Linux 2.2.15 i686)" [URL]
8. Redirects
8.1 Location header
9. Redirects
When a resource is requested from a server, the reply from the server may
include a hint about where the browser should go next to find this page, or a
@ -520,16 +318,7 @@ The Art Of Scripting HTTP Requests Using Curl
only use POST in the first request, and then revert to GET in the following
operations.
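To make curl follow such Location: redirects by itself, use the --location
(-L) option:

  curl --location http://www.example.com/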
8.2 Other redirects
Browsers typically support at least two other ways of redirects that curl
doesn't: first the html may contain a meta refresh tag that asks the browser
to load a specific URL after a set number of seconds, or it may use
javascript to do it.
9. Cookies
9.1 Cookie Basics
10. Cookies
The way the web browsers do "client side state control" is by using
cookies. Cookies are just names with associated contents. The cookies are
@ -546,8 +335,6 @@ The Art Of Scripting HTTP Requests Using Curl
must be able to record and send back cookies the way the web application
expects them. The same way browsers deal with them.
9.2 Cookie options
The simplest way to send a few cookies to the server when getting a page with
curl is to add them on the command line like:
@ -564,7 +351,7 @@ The Art Of Scripting HTTP Requests Using Curl
Curl has a full blown cookie parsing engine built-in that comes to use if you
want to reconnect to a server and use cookies that were stored from a
previous connection (or hand-crafted manually to fool the server into
previous connection (or handicrafted manually to fool the server into
believing you had a previous connection). To use previously stored cookies,
you run curl like:
@ -579,18 +366,16 @@ The Art Of Scripting HTTP Requests Using Curl
curl --cookie nada --location http://www.example.com
Curl has the ability to read and write cookie files that use the same file
format that Netscape and Mozilla once used. It is a convenient way to share
cookies between scripts or invokes. The --cookie (-b) switch automatically
detects if a given file is such a cookie file and parses it, and by using the
--cookie-jar (-c) option you'll make curl write a new cookie file at the end
of an operation:
format that Netscape and Mozilla do. It is a convenient way to share cookies
between browsers and automatic scripts. The --cookie (-b) switch
automatically detects if a given file is such a cookie file and parses it,
and by using the --cookie-jar (-c) option you'll make curl write a new cookie
file at the end of an operation:
curl --cookie cookies.txt --cookie-jar newcookies.txt \
http://www.example.com
10. HTTPS
10.1 HTTPS is HTTP secure
11. HTTPS
There are a few ways to do secure HTTP transfers. The by far most common
protocol for doing this is what is generally known as HTTPS, HTTP over
@ -601,14 +386,12 @@ The Art Of Scripting HTTP Requests Using Curl
truckload of advanced features to allow all those encryptions and key
infrastructure mechanisms encrypted HTTP requires.
Curl supports encrypted fetches when built to use a TLS library and it can be
built to use one out of a fairly large set of libraries - "curl -V" will show
which one your curl was built to use (if any!). To get a page from a HTTPS
server, simply run curl like:
Curl supports encrypted fetches thanks to the freely available OpenSSL
libraries. To get a page from a HTTPS server, simply run curl like:
curl https://secure.example.com
10.2 Certificates
11.1 Certificates
In the HTTPS world, you use certificates to validate that you are the one
you claim to be, as an addition to normal passwords. Curl supports client-
@ -628,17 +411,9 @@ The Art Of Scripting HTTP Requests Using Curl
More about server certificate verification and ca cert bundles can be read
in the SSLCERTS document, available online here:
https://curl.haxx.se/docs/sslcerts.html
http://curl.haxx.se/docs/sslcerts.html
At times you may end up with your own CA cert store and then you can tell
curl to use that to verify the server's certificate:
curl --cacert ca-bundle.pem https://example.com/
11. Custom Request Elements
11.1 Modify method and headers
12. Custom Request Elements
Doing fancy stuff, you may need to add or change elements of a single curl
request.
@ -659,26 +434,7 @@ The Art Of Scripting HTTP Requests Using Curl
curl --header "Destination: http://nowhere" http://example.com
11.2 More on changed methods
It should be noted that curl selects which methods to use on its own
depending on what action to ask for. -d will do POST, -I will do HEAD and so
on. If you use the --request / -X option you can change the method keyword
curl selects, but you will not modify curl's behavior. This means that if you
for example use -d "data" to do a POST, you can modify the method to a
PROPFIND with -X and curl will still think it sends a POST. You can change
the normal GET to a POST method by simply adding -X POST in a command line
like:
curl -X POST http://example.org/
... but curl will still think and act as if it sent a GET so it won't send any
request body etc.
12. Web Login
12.1 Some login tricks
13. Web Login
While not strictly just HTTP related, it still causes a lot of people problems
so here's the executive run-down of how the vast majority of all login forms
@ -697,7 +453,7 @@ The Art Of Scripting HTTP Requests Using Curl
sometimes they use such code to set or modify cookie contents. Possibly they
do that to prevent programmed logins, like this manual describes how to...
Anyway, if reading the code isn't enough to let you repeat the behavior
manually, capturing the HTTP requests done by your browsers and analyzing the
manually, capturing the HTTP requests done by your browers and analyzing the
sent cookies is usually a working method to work out how to shortcut the
javascript need.
@ -707,9 +463,7 @@ The Art Of Scripting HTTP Requests Using Curl
to do a proper login POST. Remember that the contents need to be URL encoded
when sent in a normal POST.
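A minimal sketch of such a login POST - the field names, values and URL are
made up and must be taken from the actual login form:

  curl --data-urlencode "user=daniel" --data-urlencode "password=secret" \
    --cookie-jar cookies.txt http://www.example.com/login.cgi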
13. Debug
13.1 Some debug tricks
14. Debug
Many times when you run curl on a site, you'll notice that the site doesn't
seem to respond the same way to your curl requests as it does to your
@ -719,40 +473,35 @@ The Art Of Scripting HTTP Requests Using Curl
browser's requests:
* Use the --trace-ascii option to store fully detailed logs of the requests
for easier analyzing and better understanding
for easier analyzing and better understanding
* Make sure you check for and use cookies when needed (both reading with
--cookie and writing with --cookie-jar)
--cookie and writing with --cookie-jar)
* Set user-agent to one like a recent popular browser does
* Set referer like it is set by the browser
* If you use POST, make sure you send all the fields and in the same order as
the browser does it.
the browser does it. (See chapter 4.5 above)
A very good helper to make sure you do this right, is the LiveHTTPHeader tool
that lets you view all headers you send and receive with Mozilla/Firefox
(even when using HTTPS). Chrome features similar functionality out of the box
among the developer's tools.
(even when using HTTPS).
A more raw approach is to capture the HTTP traffic on the network with tools
such as ethereal or tcpdump and check what headers that were sent and
received by the browser. (HTTPS makes this technique inefficient.)
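For example, a sketch with tcpdump (needs sufficient privileges and only
sees plain HTTP on the default port):

  tcpdump -A -s 0 port 80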
14. References
15. References
14.1 Standards
RFC 2616 is a must to read if you want in-depth understanding of the HTTP
protocol.
RFC 7230 is a must to read if you want in-depth understanding of the HTTP
protocol
RFC 3986 explains the URL syntax.
RFC 3986 explains the URL syntax
RFC 2109 defines how cookies are supposed to work.
RFC 1867 defines the HTTP post upload format
RFC 1867 defines the HTTP post upload format.
RFC 6265 defines how HTTP cookies work
14.2 Sites
https://curl.haxx.se is the home of the cURL project
http://curl.haxx.se is the home of the cURL project

View File

@ -1,31 +1,42 @@
_ _ ____ _
___| | | | _ \| |
/ __| | | | |_) | |
| (__| |_| | _ <| |___
\___|\___/|_| \_\_____|
Version Numbers and Releases
============================
Curl is not only curl. Curl is also libcurl. They're actually individually
versioned, but they mostly follow each other rather closely.
The version numbering is always built up using the same system:
X.Y.Z
X.Y[.Z][-preN]
- X is main version number
- Y is release number
- Z is patch number
## Bumping numbers
Where
X is main version number
Y is release number
Z is patch number
N is pre-release number
One of these numbers will get bumped in each new release. The numbers to the
right of a bumped number will be reset to zero. If Z is zero, it may not be
included in the version number.
included in the version number. The pre release number is only included in
pre releases (they're never used in public, official, releases).
The main version number will get bumped when *really* big, world colliding
changes are made. The release number is bumped when changes are performed or
things/features are added. The patch number is bumped when the changes are
mere bugfixes.
changes are made. The release number is bumped when big changes are
performed. The patch number is bumped when the changes are mere bugfixes and
only minor feature changes. The pre-release is a counter, to identify which
pre-release a certain release is.
When reaching the end of a pre-release period, the version without the
pre-release part will be released as a public release.
It means that after release 1.2.3, we can release 2.0 if something really big
has been made, 1.3 if not that big changes were made or 1.2.4 if mostly bugs
were fixed.
were fixed. Before 1.2.4 is released, we might release a 1.2.4-pre1 release
for the brave people to try before the actual release.
Bumping, as in increasing the number with 1, is unconditionally only
affecting one of the numbers (except the ones to the right of it, that may be
@ -45,12 +56,12 @@ Version Numbers and Releases
#define LIBCURL_VERSION_NUM 0xXXYYZZ
Where XX, YY and ZZ are the main version, release and patch numbers in
hexadecimal. All three number fields are always represented using two digits
(eight bits each). 1.2 would appear as "0x010200" while version 9.11.7
appears as "0x090b07".
hexadecimal. All three numbers are always represented using two digits. 1.2
would appear as "0x010200" while version 9.11.7 appears as "0x090b07".
This 6-digit hexadecimal number is always a greater number in a more recent
release. It makes comparisons with greater than and less than work.
This 6-digit hexadecimal number does not show pre-release number, and it is
always a greater number in a more recent release. It makes comparisons with
greater than and less than work.
This number is also available as three separate defines:
`LIBCURL_VERSION_MAJOR`, `LIBCURL_VERSION_MINOR` and `LIBCURL_VERSION_PATCH`.
LIBCURL_VERSION_MAJOR, LIBCURL_VERSION_MINOR and LIBCURL_VERSION_PATCH.
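If you just want to look at this number for an installed libcurl, the
curl-config tool can print it; for a hypothetical 7.21.5 installation it
would show the hex digits like this:

  curl-config --vernum
  071505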

View File

@ -5,11 +5,11 @@
.\" * | (__| |_| | _ <| |___
.\" * \___|\___/|_| \_\_____|
.\" *
.\" * Copyright (C) 1998 - 2012, Daniel Stenberg, <daniel@haxx.se>, et al.
.\" * Copyright (C) 1998 - 2010, Daniel Stenberg, <daniel@haxx.se>, et al.
.\" *
.\" * This software is licensed as described in the file COPYING, which
.\" * you should have received as part of this distribution. The terms
.\" * are also available at https://curl.haxx.se/docs/copyright.html.
.\" * are also available at http://curl.haxx.se/docs/copyright.html.
.\" *
.\" * You may opt to use, copy, modify, merge, publish, distribute and/or sell
.\" * copies of the Software, and permit persons to whom the Software is
@ -93,6 +93,7 @@ What's the installed libcurl version?
How do I build a single file with a one-line command?
$ `curl-config --cc --cflags` -o example example.c `curl-config --libs`
$ `curl-config --cc --cflags --libs` -o example example.c
.SH "SEE ALSO"
.BR curl (1)

File diff suppressed because it is too large Load Diff

View File

@ -4,14 +4,12 @@ certinfo
chkspeed
cookie_interface
debug
externalsocket
fileupload
fopen
ftp-wildcard
ftpget
ftpgetinfo
ftpgetresp
ftpsget
ftpupload
getinfo
getinmemory
@ -19,61 +17,19 @@ http-post
httpcustomheader
httpput
https
imap
imap-append
imap-copy
imap-create
imap-delete
imap-examine
imap-fetch
imap-list
imap-multi
imap-noop
imap-search
imap-ssl
imap-store
imap-tls
multi-app
multi-debugcallback
multi-double
multi-post
multi-single
persistant
pop3-dele
pop3-list
pop3-multi
pop3-noop
pop3-retr
pop3-ssl
pop3-stat
pop3-tls
pop3-top
pop3-uidl
pop3s
pop3slist
post-callback
postinmemory
postit2
progressfunc
resolve
rtsp
sendrecv
sepheaders
sftpget
simple
simplepost
simplesmtp
simplessl
smtp-expn
smtp-mail
smtp-multi
smtp-ssl
smtp-tls
smtp-vrfy
url2file
usercertinmem
xmlstream
http2-download
http2-serverpush
http2-upload
imap-lsub

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,9 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* Source code using the multi interface to download many
* files, with a capped maximum amount of simultaneous transfers.
* </DESC>
/* Example application source code using the multi interface to download many
* files, but with a capped maximum amount of simultaneous transfers.
*
* Written by Michael Wallner
*/
@ -63,6 +62,7 @@ static const char *urls[] = {
"http://www.uefa.com",
"http://www.ieee.org",
"http://www.apple.com",
"http://www.sony.com",
"http://www.symantec.com",
"http://www.zdnet.com",
"http://www.fujitsu.com",
@ -127,42 +127,41 @@ int main(void)
uses */
curl_multi_setopt(cm, CURLMOPT_MAXCONNECTS, (long)MAX);
for(C = 0; C < MAX; ++C) {
for (C = 0; C < MAX; ++C) {
init(cm, C);
}
while(U) {
while (U) {
curl_multi_perform(cm, &U);
if(U) {
if (U) {
FD_ZERO(&R);
FD_ZERO(&W);
FD_ZERO(&E);
if(curl_multi_fdset(cm, &R, &W, &E, &M)) {
if (curl_multi_fdset(cm, &R, &W, &E, &M)) {
fprintf(stderr, "E: curl_multi_fdset\n");
return EXIT_FAILURE;
}
if(curl_multi_timeout(cm, &L)) {
if (curl_multi_timeout(cm, &L)) {
fprintf(stderr, "E: curl_multi_timeout\n");
return EXIT_FAILURE;
}
if(L == -1)
if (L == -1)
L = 100;
if(M == -1) {
if (M == -1) {
#ifdef WIN32
Sleep(L);
#else
sleep((unsigned int)L / 1000);
sleep(L / 1000);
#endif
}
else {
} else {
T.tv_sec = L/1000;
T.tv_usec = (L%1000)*1000;
if(0 > select(M+1, &R, &W, &E, &T)) {
if (0 > select(M+1, &R, &W, &E, &T)) {
fprintf(stderr, "E: select(%i,,,,%li): %i: %s\n",
M+1, L, errno, strerror(errno));
return EXIT_FAILURE;
@ -170,8 +169,8 @@ int main(void)
}
}
while((msg = curl_multi_info_read(cm, &Q))) {
if(msg->msg == CURLMSG_DONE) {
while ((msg = curl_multi_info_read(cm, &Q))) {
if (msg->msg == CURLMSG_DONE) {
char *url;
CURL *e = msg->easy_handle;
curl_easy_getinfo(msg->easy_handle, CURLINFO_PRIVATE, &url);
@ -183,7 +182,7 @@ int main(void)
else {
fprintf(stderr, "E: CURLMsg (%d)\n", msg->msg);
}
if(C < CNT) {
if (C < CNT) {
init(cm, C++);
U++; /* just to prevent it from remaining at 0 if there are more
URLs to get */

View File

@ -5,11 +5,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -23,7 +23,7 @@
AUTOMAKE_OPTIONS = foreign nostdinc
EXTRA_DIST = README Makefile.example Makefile.inc Makefile.m32 \
Makefile.netware makefile.dj $(COMPLICATED_EXAMPLES)
makefile.dj $(COMPLICATED_EXAMPLES)
# Specify our include paths here, and do it relative to $(top_srcdir) and
# $(top_builddir), to ensure that these paths which belong to the library
@ -31,36 +31,26 @@ EXTRA_DIST = README Makefile.example Makefile.inc Makefile.m32 \
# might possibly already be installed in the system.
#
# $(top_builddir)/include/curl for generated curlbuild.h included from curl.h
# $(top_builddir)/include for generated curlbuild.h inc. from lib/curl_setup.h
# $(top_builddir)/include for generated curlbuild.h included from lib/setup.h
# $(top_srcdir)/include is for libcurl's external include files
AM_CPPFLAGS = -I$(top_builddir)/include/curl \
-I$(top_builddir)/include \
-I$(top_srcdir)/include
INCLUDES = -I$(top_builddir)/include/curl \
-I$(top_builddir)/include \
-I$(top_srcdir)/include
LIBDIR = $(top_builddir)/lib
# Avoid libcurl obsolete stuff
AM_CPPFLAGS += -DCURL_NO_OLDIES
if USE_CPPFLAG_CURL_STATICLIB
AM_CPPFLAGS += -DCURL_STATICLIB
if STATICLIB
# we need this define when building with a static lib on Windows
STATICCPPFLAGS = -DCURL_STATICLIB
endif
# Prevent LIBS from being used for all link targets
LIBS = $(BLANK_AT_MAKETIME)
CPPFLAGS = -DCURL_NO_OLDIES $(STATICCPPFLAGS)
# Dependencies
if USE_EXPLICIT_LIB_DEPS
LDADD = $(LIBDIR)/libcurl.la @LIBCURL_LIBS@
else
LDADD = $(LIBDIR)/libcurl.la
endif
# Makefile.inc provides the check_PROGRAMS and COMPLICATED_EXAMPLES defines
include Makefile.inc
all: $(check_PROGRAMS)
checksrc:
@@PERL@ $(top_srcdir)/lib/checksrc.pl -D$(top_srcdir)/docs/examples *.c

View File

@ -9,7 +9,7 @@
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is

View File

@ -1,43 +1,14 @@
#***************************************************************************
# _ _ ____ _
# Project ___| | | | _ \| |
# / __| | | | |_) | |
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
###########################################################################
# These are all libcurl example programs to be test compiled
check_PROGRAMS = 10-at-a-time anyauthput cookie_interface debug fileupload \
fopen ftpget ftpgetresp ftpupload getinfo getinmemory http-post httpput \
https multi-app multi-debugcallback multi-double multi-post multi-single \
persistant post-callback postit2 sepheaders simple simplepost simplessl \
sendrecv httpcustomheader certinfo chkspeed ftpgetinfo ftp-wildcard \
smtp-mail smtp-multi smtp-ssl smtp-tls smtp-vrfy smtp-expn rtsp \
externalsocket resolve progressfunc pop3-retr pop3-list pop3-uidl \
pop3-dele pop3-top pop3-stat pop3-noop pop3-ssl pop3-tls pop3-multi \
imap-list imap-lsub imap-fetch imap-store imap-append imap-examine \
imap-search imap-create imap-delete imap-copy imap-noop imap-ssl \
imap-tls imap-multi url2file sftpget ftpsget postinmemory http2-download \
http2-upload http2-serverpush getredirect
sendrecv httpcustomheader certinfo chkspeed ftpgetinfo ftp-wildcard \
smtp-multi simplesmtp smtp-tls
# These examples require external dependencies that may not be commonly
# available on POSIX systems, so don't bother attempting to compile them here.
COMPLICATED_EXAMPLES = curlgtk.c curlx.c htmltitle.cpp cacertinmem.c \
ftpuploadresume.c ghiper.c hiperfifo.c htmltidy.c multithread.c \
COMPLICATED_EXAMPLES = curlgtk.c curlx.c htmltitle.cc cacertinmem.c \
ftpuploadresume.c ghiper.c hiperfifo.c htmltidy.c multithread.c \
opensslthreadlock.c sampleconv.c synctime.c threaded-ssl.c evhiperfifo.c \
smooth-gtk-thread.c version-check.pl href_extractor.c asiohiper.cpp \
multi-uv.c xmlstream.c usercertinmem.c sessioninfo.c
smooth-gtk-thread.c version-check.pl

View File

@ -5,11 +5,11 @@
# | (__| |_| | _ <| |___
# \___|\___/|_| \_\_____|
#
# Copyright (C) 1998 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
# Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.haxx.se/docs/copyright.html.
# are also available at http://curl.haxx.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
@ -19,167 +19,57 @@
# KIND, either express or implied.
#
###########################################################################
#########################################################################
#
## Makefile for building curl examples with MingW (GCC-3.2 or later)
## and optionally OpenSSL (1.0.2a), libssh2 (1.5), zlib (1.2.8), librtmp (2.4)
## Makefile for building curl examples with MingW32
## and optionally OpenSSL (0.9.8), libssh2 (0.18), zlib (1.2.3)
##
## Usage: mingw32-make -f Makefile.m32 CFG=-feature1[-feature2][-feature3][...]
## Example: mingw32-make -f Makefile.m32 CFG=-zlib-ssl-spi-winidn
## Usage:
## mingw32-make -f Makefile.m32 [SSL=1] [SSH2=1] [ZLIB=1] [SSPI=1] [IPV6=1] [DYN=1]
##
## Hint: you can also set environment vars to control the build, f.e.:
## set ZLIB_PATH=c:/zlib-1.2.8
## set ZLIB_PATH=c:/zlib-1.2.3
## set ZLIB=1
#
###########################################################################
##
#########################################################################
# Edit the path below to point to the base of your Zlib sources.
ifndef ZLIB_PATH
ZLIB_PATH = ../../../zlib-1.2.8
ZLIB_PATH = ../../zlib-1.2.3
endif
# Edit the path below to point to the base of your OpenSSL package.
ifndef OPENSSL_PATH
OPENSSL_PATH = ../../../openssl-1.0.2a
OPENSSL_PATH = ../../openssl-0.9.8k
endif
# Edit the path below to point to the base of your LibSSH2 package.
ifndef LIBSSH2_PATH
LIBSSH2_PATH = ../../../libssh2-1.5.0
endif
# Edit the path below to point to the base of your librtmp package.
ifndef LIBRTMP_PATH
LIBRTMP_PATH = ../../../librtmp-2.4
endif
# Edit the path below to point to the base of your libidn package.
ifndef LIBIDN_PATH
LIBIDN_PATH = ../../../libidn-1.32
endif
# Edit the path below to point to the base of your MS IDN package.
# Microsoft Internationalized Domain Names (IDN) Mitigation APIs 1.1
# https://www.microsoft.com/en-us/download/details.aspx?id=734
ifndef WINIDN_PATH
WINIDN_PATH = ../../../Microsoft IDN Mitigation APIs
LIBSSH2_PATH = ../../libssh2-1.2
endif
# Edit the path below to point to the base of your Novell LDAP NDK.
ifndef LDAP_SDK
LDAP_SDK = c:/novell/ndk/cldapsdk/win32
endif
# Edit the path below to point to the base of your nghttp2 package.
ifndef NGHTTP2_PATH
NGHTTP2_PATH = ../../../nghttp2-1.0.0
endif
PROOT = ../..
ARES_LIB = $(PROOT)/ares
# Edit the path below to point to the base of your c-ares package.
ifndef LIBCARES_PATH
LIBCARES_PATH = $(PROOT)/ares
endif
SSL = 1
ZLIB = 1
# Edit the var below to set to your architecture or set environment var.
ifndef ARCH
ifeq ($(findstring x86_64,$(shell $(CC) -dumpmachine)),x86_64)
ARCH = w64
else
ARCH = w32
endif
endif
CC = $(CROSSPREFIX)gcc
CFLAGS = -g -O2 -Wall
CFLAGS += -fno-strict-aliasing
ifeq ($(ARCH),w64)
CFLAGS += -m64 -D_AMD64_
LDFLAGS += -m64
RCFLAGS += -F pe-x86-64
else
CFLAGS += -m32
LDFLAGS += -m32
RCFLAGS += -F pe-i386
endif
CC = gcc
CFLAGS = -g -O2 -Wall
# comment LDFLAGS below to keep debug info
LDFLAGS = -s
RC = $(CROSSPREFIX)windres
RCFLAGS = --include-dir=$(PROOT)/include -O COFF -i
# Platform-dependent helper tool macros
ifeq ($(findstring /sh,$(SHELL)),/sh)
DEL = rm -f $1
RMDIR = rm -fr $1
MKDIR = mkdir -p $1
COPY = -cp -afv $1 $2
#COPYR = -cp -afr $1/* $2
COPYR = -rsync -aC $1/* $2
TOUCH = touch $1
CAT = cat
ECHONL = echo ""
DL = '
else
ifeq "$(OS)" "Windows_NT"
DEL = -del 2>NUL /q /f $(subst /,\,$1)
RMDIR = -rd 2>NUL /q /s $(subst /,\,$1)
else
DEL = -del 2>NUL $(subst /,\,$1)
RMDIR = -deltree 2>NUL /y $(subst /,\,$1)
endif
MKDIR = -md 2>NUL $(subst /,\,$1)
COPY = -copy 2>NUL /y $(subst /,\,$1) $(subst /,\,$2)
COPYR = -xcopy 2>NUL /q /y /e $(subst /,\,$1) $(subst /,\,$2)
TOUCH = copy 2>&1>NUL /b $(subst /,\,$1) +,,
CAT = type
ECHONL = $(ComSpec) /c echo.
endif
LDFLAGS = -s
RC = windres
RCFLAGS = --include-dir=$(PROOT)/include -O COFF -i
RM = del /q /f > NUL 2>&1
CP = copy
########################################################
## Nothing more to do below this line!
ifeq ($(findstring -dyn,$(CFG)),-dyn)
DYN = 1
endif
ifeq ($(findstring -ares,$(CFG)),-ares)
ARES = 1
endif
ifeq ($(findstring -rtmp,$(CFG)),-rtmp)
RTMP = 1
SSL = 1
ZLIB = 1
endif
ifeq ($(findstring -ssh2,$(CFG)),-ssh2)
SSH2 = 1
SSL = 1
ZLIB = 1
endif
ifeq ($(findstring -ssl,$(CFG)),-ssl)
SSL = 1
endif
ifeq ($(findstring -zlib,$(CFG)),-zlib)
ZLIB = 1
endif
ifeq ($(findstring -idn,$(CFG)),-idn)
IDN = 1
endif
ifeq ($(findstring -winidn,$(CFG)),-winidn)
WINIDN = 1
endif
ifeq ($(findstring -sspi,$(CFG)),-sspi)
SSPI = 1
endif
ifeq ($(findstring -ldaps,$(CFG)),-ldaps)
LDAPS = 1
endif
ifeq ($(findstring -ipv6,$(CFG)),-ipv6)
IPV6 = 1
endif
ifeq ($(findstring -metalink,$(CFG)),-metalink)
METALINK = 1
endif
ifeq ($(findstring -winssl,$(CFG)),-winssl)
WINSSL = 1
SSPI = 1
endif
ifeq ($(findstring -nghttp2,$(CFG)),-nghttp2)
NGHTTP2 = 1
endif
INCLUDES = -I. -I$(PROOT) -I$(PROOT)/include -I$(PROOT)/lib
LINK = $(CC) $(LDFLAGS) -o $@
ifdef DYN
curl_DEPENDENCIES = $(PROOT)/lib/libcurldll.a $(PROOT)/lib/libcurl.dll
@ -188,68 +78,37 @@ else
curl_DEPENDENCIES = $(PROOT)/lib/libcurl.a
curl_LDADD = -L$(PROOT)/lib -lcurl
CFLAGS += -DCURL_STATICLIB
LDFLAGS += -static
endif
ifdef ARES
ifndef DYN
curl_DEPENDENCIES += $(LIBCARES_PATH)/libcares.a
curl_DEPENDENCIES += $(ARES_LIB)/libcares.a
endif
CFLAGS += -DUSE_ARES
curl_LDADD += -L"$(LIBCARES_PATH)" -lcares
endif
ifdef RTMP
CFLAGS += -DUSE_LIBRTMP
curl_LDADD += -L"$(LIBRTMP_PATH)/librtmp" -lrtmp -lwinmm
endif
ifdef NGHTTP2
CFLAGS += -DUSE_NGHTTP2
curl_LDADD += -L"$(NGHTTP2_PATH)/lib" -lnghttp2
curl_LDADD += -L$(ARES_LIB) -lcares
endif
ifdef SSH2
CFLAGS += -DUSE_LIBSSH2 -DHAVE_LIBSSH2_H
curl_LDADD += -L"$(LIBSSH2_PATH)/win32" -lssh2
curl_LDADD += -L$(LIBSSH2_PATH)/win32 -lssh2
endif
ifdef SSL
ifndef OPENSSL_LIBPATH
OPENSSL_LIBS = -lssl -lcrypto
ifeq "$(wildcard $(OPENSSL_PATH)/out)" "$(OPENSSL_PATH)/out"
OPENSSL_LIBPATH = $(OPENSSL_PATH)/out
ifdef DYN
OPENSSL_LIBS = -lssl32 -leay32
endif
endif
ifeq "$(wildcard $(OPENSSL_PATH)/lib)" "$(OPENSSL_PATH)/lib"
OPENSSL_LIBPATH = $(OPENSSL_PATH)/lib
endif
INCLUDES += -I"$(OPENSSL_PATH)/outinc"
CFLAGS += -DUSE_SSLEAY -DHAVE_OPENSSL_ENGINE_H
ifdef DYN
curl_LDADD += -L$(OPENSSL_PATH)/out -leay32 -lssl32
else
curl_LDADD += -L$(OPENSSL_PATH)/out -lssl -lcrypto -lgdi32
endif
ifndef DYN
OPENSSL_LIBS += -lgdi32 -lcrypt32
endif
CFLAGS += -DUSE_OPENSSL
curl_LDADD += -L"$(OPENSSL_LIBPATH)" $(OPENSSL_LIBS)
endif
ifdef ZLIB
INCLUDES += -I"$(ZLIB_PATH)"
CFLAGS += -DHAVE_LIBZ -DHAVE_ZLIB_H
curl_LDADD += -L"$(ZLIB_PATH)" -lz
endif
ifdef IDN
CFLAGS += -DUSE_LIBIDN
curl_LDADD += -L"$(LIBIDN_PATH)/lib" -lidn
else
ifdef WINIDN
CFLAGS += -DUSE_WIN32_IDN
curl_LDADD += -L"$(WINIDN_PATH)" -lnormaliz
endif
curl_LDADD += -L$(ZLIB_PATH) -lz
endif
ifdef SSPI
CFLAGS += -DUSE_WINDOWS_SSPI
ifdef WINSSL
CFLAGS += -DUSE_SCHANNEL
endif
endif
ifdef IPV6
CFLAGS += -DENABLE_IPV6 -D_WIN32_WINNT=0x0501
CFLAGS += -DENABLE_IPV6
endif
ifdef LDAPS
CFLAGS += -DHAVE_LDAP_SSL
@ -264,34 +123,32 @@ ifdef USE_LDAP_OPENLDAP
endif
ifndef USE_LDAP_NOVELL
ifndef USE_LDAP_OPENLDAP
curl_LDADD += -lwldap32
curl_LDADD += -lwldap32
endif
endif
curl_LDADD += -lws2_32
COMPILE = $(CC) $(INCLUDES) $(CFLAGS)
# Makefile.inc provides the check_PROGRAMS and COMPLICATED_EXAMPLES defines
include Makefile.inc
check_PROGRAMS := $(patsubst %,%.exe,$(strip $(check_PROGRAMS)))
check_PROGRAMS += ftpuploadresume.exe synctime.exe
example_PROGRAMS := $(patsubst %,%.exe,$(strip $(check_PROGRAMS)))
.PRECIOUS: %.o
.SUFFIXES: .rc .res .o .exe
all: $(check_PROGRAMS)
all: $(example_PROGRAMS)
%.exe: %.o $(curl_DEPENDENCIES)
$(CC) $(LDFLAGS) -o $@ $< $(curl_LDADD)
.o.exe: $(curl_DEPENDENCIES)
$(LINK) $< $(curl_LDADD)
%.o: %.c
$(CC) $(INCLUDES) $(CFLAGS) -c $<
.c.o:
$(COMPILE) -c $<
%.res: %.rc
.rc.res:
$(RC) $(RCFLAGS) $< -o $@
clean:
@$(call DEL, $(check_PROGRAMS:.exe=.o))
$(RM) $(example_PROGRAMS)
distclean vclean: clean
@$(call DEL, $(check_PROGRAMS))

View File

@ -1,434 +0,0 @@
#################################################################
#
## Makefile for building curl.nlm (NetWare version - gnu make)
## Use: make -f Makefile.netware
##
## Comments to: Guenter Knauf http://www.gknw.net/phpbb
#
#################################################################
# Edit the path below to point to the base of your Novell NDK.
ifndef NDKBASE
NDKBASE = c:/novell
endif
# Edit the path below to point to the base of your Zlib sources.
ifndef ZLIB_PATH
ZLIB_PATH = ../../../zlib-1.2.8
endif
# Edit the path below to point to the base of your OpenSSL package.
ifndef OPENSSL_PATH
OPENSSL_PATH = ../../../openssl-1.0.2a
endif
# Edit the path below to point to the base of your LibSSH2 package.
ifndef LIBSSH2_PATH
LIBSSH2_PATH = ../../../libssh2-1.5.0
endif
# Edit the path below to point to the base of your axTLS package.
ifndef AXTLS_PATH
AXTLS_PATH = ../../../axTLS-1.2.7
endif
# Edit the path below to point to the base of your libidn package.
ifndef LIBIDN_PATH
LIBIDN_PATH = ../../../libidn-1.32
endif
# Edit the path below to point to the base of your librtmp package.
ifndef LIBRTMP_PATH
LIBRTMP_PATH = ../../../librtmp-2.4
endif
# Edit the path below to point to the base of your fbopenssl package.
ifndef FBOPENSSL_PATH
FBOPENSSL_PATH = ../../fbopenssl-0.4
endif
# Edit the path below to point to the base of your c-ares package.
ifndef LIBCARES_PATH
LIBCARES_PATH = ../../ares
endif
ifndef INSTDIR
INSTDIR = ..$(DS)..$(DS)curl-$(LIBCURL_VERSION_STR)-bin-nw
endif
# Edit the vars below to change NLM target settings.
TARGET = examples
VERSION = $(LIBCURL_VERSION)
COPYR = Copyright (C) $(LIBCURL_COPYRIGHT_STR)
DESCR = cURL ($(LIBARCH))
MTSAFE = YES
STACK = 8192
SCREEN = Example Program
# Comment the line below if you dont want to load protected automatically.
# LDRING = 3
# Uncomment the next line to enable linking with POSIX semantics.
# POSIXFL = 1
# Edit the var below to point to your lib architecture.
ifndef LIBARCH
LIBARCH = LIBC
endif
# must be equal to NDEBUG or DEBUG, CURLDEBUG
ifndef DB
DB = NDEBUG
endif
# Optimization: -O<n> or debugging: -g
ifeq ($(DB),NDEBUG)
OPT = -O2
OBJDIR = release
else
OPT = -g
OBJDIR = debug
endif
# The following lines defines your compiler.
ifdef CWFolder
METROWERKS = $(CWFolder)
endif
ifdef METROWERKS
# MWCW_PATH = $(subst \,/,$(METROWERKS))/Novell Support
MWCW_PATH = $(subst \,/,$(METROWERKS))/Novell Support/Metrowerks Support
CC = mwccnlm
else
CC = gcc
endif
PERL = perl
# Here you can find a native Win32 binary of the original awk:
# http://www.gknw.net/development/prgtools/awk-20100523.zip
AWK = awk
CP = cp -afv
MKDIR = mkdir
# RM = rm -f
# If you want to mark the target as MTSAFE you will need a tool for
# generating the xdc data for the linker; here's a minimal tool:
# http://www.gknw.net/development/prgtools/mkxdc.zip
MPKXDC = mkxdc
# LIBARCH_U = $(shell $(AWK) 'BEGIN {print toupper(ARGV[1])}' $(LIBARCH))
LIBARCH_L = $(shell $(AWK) 'BEGIN {print tolower(ARGV[1])}' $(LIBARCH))
# Include the version info retrieved from curlver.h
-include $(OBJDIR)/version.inc
# Global flags for all compilers
CFLAGS += $(OPT) -D$(DB) -DNETWARE -DHAVE_CONFIG_H -nostdinc
ifeq ($(CC),mwccnlm)
LD = mwldnlm
LDFLAGS = -nostdlib $< $(PRELUDE) $(LDLIBS) -o $@ -commandfile
LIBEXT = lib
CFLAGS += -gccinc -inline off -opt nointrinsics -proc 586
CFLAGS += -relax_pointers
#CFLAGS += -w on
ifeq ($(LIBARCH),LIBC)
ifeq ($(POSIXFL),1)
PRELUDE = $(NDK_LIBC)/imports/posixpre.o
else
PRELUDE = $(NDK_LIBC)/imports/libcpre.o
endif
CFLAGS += -align 4
else
# PRELUDE = $(NDK_CLIB)/imports/clibpre.o
# to avoid the __init_* / __deinit_* whoes dont use prelude from NDK
PRELUDE = "$(MWCW_PATH)/libraries/runtime/prelude.obj"
# CFLAGS += -include "$(MWCW_PATH)/headers/nlm_clib_prefix.h"
CFLAGS += -align 1
endif
else
LD = nlmconv
LDFLAGS = -T
LIBEXT = a
CFLAGS += -m32
CFLAGS += -fno-builtin -fno-strict-aliasing
ifeq ($(findstring gcc,$(CC)),gcc)
CFLAGS += -fpcc-struct-return
endif
CFLAGS += -Wall # -pedantic
ifeq ($(LIBARCH),LIBC)
ifeq ($(POSIXFL),1)
PRELUDE = $(NDK_LIBC)/imports/posixpre.gcc.o
else
PRELUDE = $(NDK_LIBC)/imports/libcpre.gcc.o
endif
else
# PRELUDE = $(NDK_CLIB)/imports/clibpre.gcc.o
# to avoid the __init_* / __deinit_* whoes dont use prelude from NDK
# http://www.gknw.net/development/mk_nlm/gcc_pre.zip
PRELUDE = $(NDK_ROOT)/pre/prelude.o
CFLAGS += -include $(NDKBASE)/nlmconv/genlm.h
endif
endif
NDK_ROOT = $(NDKBASE)/ndk
ifndef NDK_CLIB
NDK_CLIB = $(NDK_ROOT)/nwsdk
endif
ifndef NDK_LIBC
NDK_LIBC = $(NDK_ROOT)/libc
endif
ifndef NDK_LDAP
NDK_LDAP = $(NDK_ROOT)/cldapsdk/netware
endif
CURL_INC = ../../include
CURL_LIB = ../../lib
INCLUDES = -I$(CURL_INC)
ifeq ($(findstring -static,$(CFG)),-static)
LINK_STATIC = 1
endif
ifeq ($(findstring -ares,$(CFG)),-ares)
WITH_ARES = 1
endif
ifeq ($(findstring -rtmp,$(CFG)),-rtmp)
WITH_RTMP = 1
WITH_SSL = 1
WITH_ZLIB = 1
endif
ifeq ($(findstring -ssh2,$(CFG)),-ssh2)
WITH_SSH2 = 1
WITH_SSL = 1
WITH_ZLIB = 1
endif
ifeq ($(findstring -axtls,$(CFG)),-axtls)
WITH_AXTLS = 1
WITH_SSL =
else
ifeq ($(findstring -ssl,$(CFG)),-ssl)
WITH_SSL = 1
endif
endif
ifeq ($(findstring -zlib,$(CFG)),-zlib)
WITH_ZLIB = 1
endif
ifeq ($(findstring -idn,$(CFG)),-idn)
WITH_IDN = 1
endif
ifeq ($(findstring -ipv6,$(CFG)),-ipv6)
ENABLE_IPV6 = 1
endif
ifdef LINK_STATIC
LDLIBS = $(CURL_LIB)/libcurl.$(LIBEXT)
ifdef WITH_ARES
LDLIBS += $(LIBCARES_PATH)/libcares.$(LIBEXT)
endif
else
MODULES = libcurl.nlm
IMPORTS = @$(CURL_LIB)/libcurl.imp
endif
ifdef WITH_SSH2
# INCLUDES += -I$(LIBSSH2_PATH)/include
ifdef LINK_STATIC
LDLIBS += $(LIBSSH2_PATH)/nw/libssh2.$(LIBEXT)
else
MODULES += libssh2.nlm
IMPORTS += @$(LIBSSH2_PATH)/nw/libssh2.imp
endif
endif
ifdef WITH_RTMP
# INCLUDES += -I$(LIBRTMP_PATH)
ifdef LINK_STATIC
LDLIBS += $(LIBRTMP_PATH)/librtmp/librtmp.$(LIBEXT)
endif
endif
ifdef WITH_SSL
INCLUDES += -I$(OPENSSL_PATH)/outinc_nw_$(LIBARCH_L)
LDLIBS += $(OPENSSL_PATH)/out_nw_$(LIBARCH_L)/ssl.$(LIBEXT)
LDLIBS += $(OPENSSL_PATH)/out_nw_$(LIBARCH_L)/crypto.$(LIBEXT)
IMPORTS += GetProcessSwitchCount RunningProcess
else
ifdef WITH_AXTLS
INCLUDES += -I$(AXTLS_PATH)/inc
ifdef LINK_STATIC
LDLIBS += $(AXTLS_PATH)/lib/libaxtls.$(LIBEXT)
else
MODULES += libaxtls.nlm
IMPORTS += $(AXTLS_PATH)/lib/libaxtls.imp
endif
endif
endif
ifdef WITH_ZLIB
# INCLUDES += -I$(ZLIB_PATH)
ifdef LINK_STATIC
LDLIBS += $(ZLIB_PATH)/nw/$(LIBARCH)/libz.$(LIBEXT)
else
MODULES += libz.nlm
IMPORTS += @$(ZLIB_PATH)/nw/$(LIBARCH)/libz.imp
endif
endif
ifdef WITH_IDN
# INCLUDES += -I$(LIBIDN_PATH)/include
LDLIBS += $(LIBIDN_PATH)/lib/libidn.$(LIBEXT)
endif
ifeq ($(LIBARCH),LIBC)
INCLUDES += -I$(NDK_LIBC)/include
# INCLUDES += -I$(NDK_LIBC)/include/nks
# INCLUDES += -I$(NDK_LIBC)/include/winsock
CFLAGS += -D_POSIX_SOURCE
else
INCLUDES += -I$(NDK_CLIB)/include/nlm
# INCLUDES += -I$(NDK_CLIB)/include
endif
ifndef DISABLE_LDAP
# INCLUDES += -I$(NDK_LDAP)/$(LIBARCH_L)/inc
endif
CFLAGS += $(INCLUDES)
ifeq ($(MTSAFE),YES)
XDCOPT = -n
endif
ifeq ($(MTSAFE),NO)
XDCOPT = -u
endif
ifdef XDCOPT
XDCDATA = $(OBJDIR)/$(TARGET).xdc
endif
ifeq ($(findstring /sh,$(SHELL)),/sh)
DL = '
DS = /
PCT = %
#-include $(NDKBASE)/nlmconv/ncpfs.inc
else
DS = \\
PCT = %%
endif
# Makefile.inc provides the CSOURCES and HHEADERS defines
include Makefile.inc
check_PROGRAMS := $(patsubst %,%.nlm,$(strip $(check_PROGRAMS)))
.PRECIOUS: $(OBJDIR)/%.o $(OBJDIR)/%.def $(OBJDIR)/%.xdc
all: prebuild $(check_PROGRAMS)
prebuild: $(OBJDIR) $(OBJDIR)/version.inc
$(OBJDIR)/%.o: %.c
@echo Compiling $<
$(CC) $(CFLAGS) -c $< -o $@
$(OBJDIR)/version.inc: $(CURL_INC)/curl/curlver.h $(OBJDIR)
@echo Creating $@
@$(AWK) -f ../../packages/NetWare/get_ver.awk $< > $@
install: $(INSTDIR) all
@$(CP) $(check_PROGRAMS) $(INSTDIR)
clean:
-$(RM) -r $(OBJDIR)
distclean vclean: clean
-$(RM) $(check_PROGRAMS)
$(OBJDIR) $(INSTDIR):
@$(MKDIR) $@
%.nlm: $(OBJDIR)/%.o $(OBJDIR)/%.def $(XDCDATA)
@echo Linking $@
@-$(RM) $@
@$(LD) $(LDFLAGS) $(OBJDIR)/$(@:.nlm=.def)
$(OBJDIR)/%.xdc: Makefile.netware
@echo Creating $@
@$(MPKXDC) $(XDCOPT) $@
$(OBJDIR)/%.def: Makefile.netware
@echo $(DL)# DEF file for linking with $(LD)$(DL) > $@
@echo $(DL)# Do not edit this file - it is created by Make!$(DL) >> $@
@echo $(DL)# All your changes will be lost!!$(DL) >> $@
@echo $(DL)#$(DL) >> $@
@echo $(DL)copyright "$(COPYR)"$(DL) >> $@
@echo $(DL)description "$(DESCR) $(notdir $(@:.def=)) Example"$(DL) >> $@
@echo $(DL)version $(VERSION)$(DL) >> $@
ifdef NLMTYPE
@echo $(DL)type $(NLMTYPE)$(DL) >> $@
endif
ifdef STACK
@echo $(DL)stack $(STACK)$(DL) >> $@
endif
ifdef SCREEN
@echo $(DL)screenname "$(DESCR) $(notdir $(@:.def=)) $(SCREEN)"$(DL) >> $@
else
@echo $(DL)screenname "DEFAULT"$(DL) >> $@
endif
ifneq ($(DB),NDEBUG)
@echo $(DL)debug$(DL) >> $@
endif
@echo $(DL)threadname "_$(notdir $(@:.def=))"$(DL) >> $@
ifdef XDCDATA
@echo $(DL)xdcdata $(XDCDATA)$(DL) >> $@
endif
ifeq ($(LDRING),0)
@echo $(DL)flag_on 16$(DL) >> $@
endif
ifeq ($(LDRING),3)
@echo $(DL)flag_on 512$(DL) >> $@
endif
ifeq ($(LIBARCH),CLIB)
@echo $(DL)start _Prelude$(DL) >> $@
@echo $(DL)exit _Stop$(DL) >> $@
@echo $(DL)import @$(NDK_CLIB)/imports/clib.imp$(DL) >> $@
@echo $(DL)import @$(NDK_CLIB)/imports/threads.imp$(DL) >> $@
@echo $(DL)import @$(NDK_CLIB)/imports/nlmlib.imp$(DL) >> $@
@echo $(DL)import @$(NDK_CLIB)/imports/socklib.imp$(DL) >> $@
@echo $(DL)module clib$(DL) >> $@
ifndef DISABLE_LDAP
@echo $(DL)import @$(NDK_LDAP)/clib/imports/ldapsdk.imp$(DL) >> $@
@echo $(DL)import @$(NDK_LDAP)/clib/imports/ldapssl.imp$(DL) >> $@
# @echo $(DL)import @$(NDK_LDAP)/clib/imports/ldapx.imp$(DL) >> $@
@echo $(DL)module ldapsdk ldapssl$(DL) >> $@
endif
else
ifeq ($(POSIXFL),1)
@echo $(DL)flag_on 4194304$(DL) >> $@
endif
@echo $(DL)flag_on 64$(DL) >> $@
@echo $(DL)pseudopreemption$(DL) >> $@
ifeq ($(findstring posixpre,$(PRELUDE)),posixpre)
@echo $(DL)start POSIX_Start$(DL) >> $@
@echo $(DL)exit POSIX_Stop$(DL) >> $@
@echo $(DL)check POSIX_CheckUnload$(DL) >> $@
else
@echo $(DL)start _LibCPrelude$(DL) >> $@
@echo $(DL)exit _LibCPostlude$(DL) >> $@
@echo $(DL)check _LibCCheckUnload$(DL) >> $@
endif
@echo $(DL)import @$(NDK_LIBC)/imports/libc.imp$(DL) >> $@
@echo $(DL)import @$(NDK_LIBC)/imports/netware.imp$(DL) >> $@
@echo $(DL)module libc$(DL) >> $@
ifndef DISABLE_LDAP
@echo $(DL)import @$(NDK_LDAP)/libc/imports/lldapsdk.imp$(DL) >> $@
@echo $(DL)import @$(NDK_LDAP)/libc/imports/lldapssl.imp$(DL) >> $@
# @echo $(DL)import @$(NDK_LDAP)/libc/imports/lldapx.imp$(DL) >> $@
@echo $(DL)module lldapsdk lldapssl$(DL) >> $@
endif
endif
ifdef MODULES
@echo $(DL)module $(MODULES)$(DL) >> $@
endif
ifdef EXPORTS
@echo $(DL)export $(EXPORTS)$(DL) >> $@
endif
ifdef IMPORTS
@echo $(DL)import $(IMPORTS)$(DL) >> $@
endif
ifeq ($(findstring nlmconv,$(LD)),nlmconv)
@echo $(DL)input $(PRELUDE)$(DL) >> $@
@echo $(DL)input $(@:.def=.o)$(DL) >> $@
ifdef LDLIBS
@echo $(DL)input $(LDLIBS)$(DL) >> $@
endif
@echo $(DL)output $(notdir $(@:.def=.nlm))$(DL) >> $@
endif

View File

@ -32,7 +32,45 @@ actually torture our web site with your tests! Thanks.
EXAMPLES
Each example source file is designed to work stand-alone and to be rather
self-explanatory. The examples may at times lack the level of error checking
you need in real-world code, but that is only for the sake of readability: to
keep the code smaller and easier to follow. (A minimal pattern with basic
error checking is sketched right after the list below.)
anyauthput.c - HTTP PUT using "any" authentication method
cacertinmem.c - Use a built-in PEM certificate to retrieve a https page
cookie_interface.c - shows usage of simple cookie interface
curlgtk.c - download using a GTK progress bar
curlx.c - getting file info from the remote cert data
debug.c - showing how to use the debug callback
fileupload.c - uploading to a file:// URL
fopen.c - fopen() layer that supports opening URLs and files
ftpget.c - simple getting a file from FTP
ftpgetresp.c - get the response strings from the FTP server
ftpupload.c - upload a file to an FTP server
ftpuploadresume.c - resume an upload to an FTP server
getinfo.c - get the Content-Type from the recent transfer
getinmemory.c - download a file to memory only
ghiper.c - curl_multi_socket() using code with glib-2
hiperfifo.c - downloads all URLs written to the fifo, using
curl_multi_socket() and libevent
htmltidy.c - download a document and use libtidy to parse the HTML
htmltitle.cc - download an HTML page and extract its <title> tag using libxml
http-post.c - HTTP POST
httpput.c - HTTP PUT a local file
https.c - simple HTTPS transfer
multi-app.c - a multi-interface app
multi-debugcallback.c - a multi-interface app using the debug callback
multi-double.c - a multi-interface app doing two simultaneous transfers
multi-post.c - a multi-interface app doing a multipart formpost
multi-single.c - a multi-interface app getting a single file
multithread.c - an example using multi-threading to transfer multiple files
opensslthreadlock.c - show how to do locking when using OpenSSL multi-threaded
persistant.c - request two URLs with a persistent connection
post-callback.c - send a HTTP POST using a callback
postit2.c - send a HTTP multipart formpost
sampleconv.c - showing how a program on a non-ASCII platform would invoke
callbacks to do its own codeset conversions instead of using
the built-in iconv functions in libcurl
sepheaders.c - download headers to a separate file
simple.c - the simplest possible example: download a single URL
simplepost.c - HTTP POST
simplessl.c - HTTPS example with certificates many options set
synctime.c - Sync local time by extracting date from remote HTTP servers
10-at-a-time.c - Download many files simultaneously, 10 at a time.
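A minimal, hedged sketch of the easy-interface pattern most of the files
above share (this sketch is not itself one of the listed examples and the URL
is just a placeholder), including the basic error check that some examples
leave out for brevity:

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl;
  CURLcode res;

  curl_global_init(CURL_GLOBAL_DEFAULT);
  curl = curl_easy_init();
  if(curl) {
    /* placeholder URL */
    curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/");
    res = curl_easy_perform(curl);
    if(res != CURLE_OK)
      fprintf(stderr, "curl_easy_perform() failed: %s\n",
              curl_easy_strerror(res));
    curl_easy_cleanup(curl);
  }
  curl_global_cleanup();
  return 0;
}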

View File

@ -2,7 +2,7 @@
# pass files as argument(s)
my $docroot="https://curl.haxx.se/libcurl/c";
my $docroot="http://curl.haxx.se/libcurl/c";
for $f (@ARGV) {
open(NEW, ">$f.new");

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,11 +19,6 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* HTTP PUT upload with authentication using "any" method. libcurl picks the
* one the server supports/wants.
* </DESC>
*/
#include <stdio.h>
#include <fcntl.h>
#ifdef WIN32
@ -31,8 +26,7 @@
#else
# ifdef __VMS
typedef int intptr_t;
# endif
# if !defined(_AIX) && !defined(__sgi) && !defined(__osf__)
# else
# include <stdint.h>
# endif
# include <unistd.h>
@ -58,12 +52,6 @@
#define TRUE 1
#endif
#if defined(_AIX) || defined(__sgi) || defined(__osf__)
#ifndef intptr_t
#define intptr_t long
#endif
#endif
/*
* This example shows an HTTP PUT operation with authentication using "any"
* type. It PUTs a file given as a command line argument to the URL also given
@ -78,8 +66,7 @@
/* ioctl callback function */
static curlioerr my_ioctl(CURL *handle, curliocmd cmd, void *userp)
{
int *fdp = (int *)userp;
int fd = *fdp;
intptr_t fd = (intptr_t)userp;
(void)handle; /* not used in here */
@ -101,18 +88,13 @@ static curlioerr my_ioctl(CURL *handle, curliocmd cmd, void *userp)
/* read callback function, fread() look alike */
static size_t read_callback(void *ptr, size_t size, size_t nmemb, void *stream)
{
ssize_t retcode;
curl_off_t nread;
size_t retcode;
int *fdp = (int *)stream;
int fd = *fdp;
intptr_t fd = (intptr_t)stream;
retcode = read(fd, ptr, size * nmemb);
nread = (curl_off_t)retcode;
fprintf(stderr, "*** We read %" CURL_FORMAT_CURL_OFF_T
" bytes from file\n", nread);
fprintf(stderr, "*** We read %d bytes from file\n", retcode);
return retcode;
}
@ -121,7 +103,7 @@ int main(int argc, char **argv)
{
CURL *curl;
CURLcode res;
int hd;
intptr_t hd ;
struct stat file_info;
char *file;
@ -134,7 +116,7 @@ int main(int argc, char **argv)
url = argv[2];
/* get the file size of the local file */
hd = open(file, O_RDONLY);
hd = open(file, O_RDONLY) ;
fstat(hd, &file_info);
/* In windows, this will init the winsock stuff */
@ -147,20 +129,20 @@ int main(int argc, char **argv)
curl_easy_setopt(curl, CURLOPT_READFUNCTION, read_callback);
/* which file to upload */
curl_easy_setopt(curl, CURLOPT_READDATA, (void*)&hd);
curl_easy_setopt(curl, CURLOPT_READDATA, (void*)hd);
/* set the ioctl function */
curl_easy_setopt(curl, CURLOPT_IOCTLFUNCTION, my_ioctl);
/* pass the file descriptor to the ioctl callback as well */
curl_easy_setopt(curl, CURLOPT_IOCTLDATA, (void*)&hd);
curl_easy_setopt(curl, CURLOPT_IOCTLDATA, (void*)hd);
/* enable "uploading" (which means PUT when doing HTTP) */
curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);
curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L) ;
/* specify target URL, and note that this URL should also include a file
name, not only a directory (as you can do with FTP uploads) */
curl_easy_setopt(curl, CURLOPT_URL, url);
curl_easy_setopt(curl,CURLOPT_URL, url);
/* and give the size of the upload, this supports large file sizes
on systems that have general support for it */
@ -177,10 +159,6 @@ int main(int argc, char **argv)
/* Now run off and do what you've been told! */
res = curl_easy_perform(curl);
/* Check for errors */
if(res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
/* always cleanup */
curl_easy_cleanup(curl);
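The hunks above do not show how the handle is told to negotiate
authentication, which is the point of this example. For context, a hedged
sketch of the usual calls (the helper name and the user:password value are
placeholders, not taken from the file):

#include <curl/curl.h>

/* sketch: let libcurl pick whichever auth method the server offers */
static void setup_any_auth(CURL *curl)
{
  curl_easy_setopt(curl, CURLOPT_HTTPAUTH, (long)CURLAUTH_ANY);
  /* placeholder credentials */
  curl_easy_setopt(curl, CURLOPT_USERPWD, "user:password");
}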

View File

@ -1,467 +0,0 @@
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 2012 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* demonstrate the use of multi socket interface with boost::asio
* </DESC>
*/
/*
* This program is in c++ and uses boost::asio instead of libevent/libev.
* Requires boost::asio, boost::bind and boost::system
*
* This is an adaptation of libcurl's "hiperfifo.c" and "evhiperfifo.c"
* sample programs. This example implements a subset of the functionality from
* hiperfifo.c; for full functionality refer to hiperfifo.c or evhiperfifo.c
*
* Written by Lijo Antony based on hiperfifo.c by Jeff Pohlmeyer
*
* When running, the program creates an easy handle for a URL and
* uses the curl_multi API to fetch it.
*
* Note:
* For the sake of simplicity, URL is hard coded to "www.google.com"
*
* This is purely a demo app; all retrieved data is simply discarded by the
* write callback.
*/
#include <curl/curl.h>
#include <boost/asio.hpp>
#include <boost/bind.hpp>
#include <iostream>
#define MSG_OUT stdout /* Send info to stdout, change to stderr if you want */
/* boost::asio related objects
* using global variables for simplicity
*/
boost::asio::io_service io_service;
boost::asio::deadline_timer timer(io_service);
std::map<curl_socket_t, boost::asio::ip::tcp::socket *> socket_map;
/* Global information, common to all connections */
typedef struct _GlobalInfo
{
CURLM *multi;
int still_running;
} GlobalInfo;
/* Information associated with a specific easy handle */
typedef struct _ConnInfo
{
CURL *easy;
char *url;
GlobalInfo *global;
char error[CURL_ERROR_SIZE];
} ConnInfo;
static void timer_cb(const boost::system::error_code & error, GlobalInfo *g);
/* Update the event timer after curl_multi library calls */
static int multi_timer_cb(CURLM *multi, long timeout_ms, GlobalInfo *g)
{
fprintf(MSG_OUT, "\nmulti_timer_cb: timeout_ms %ld", timeout_ms);
/* cancel running timer */
timer.cancel();
if(timeout_ms > 0)
{
/* update timer */
timer.expires_from_now(boost::posix_time::millisec(timeout_ms));
timer.async_wait(boost::bind(&timer_cb, _1, g));
}
else
{
/* call timeout function immediately */
boost::system::error_code error; /*success*/
timer_cb(error, g);
}
return 0;
}
/* Die if we get a bad CURLMcode somewhere */
static void mcode_or_die(const char *where, CURLMcode code)
{
if(CURLM_OK != code)
{
const char *s;
switch(code)
{
case CURLM_CALL_MULTI_PERFORM:
s = "CURLM_CALL_MULTI_PERFORM";
break;
case CURLM_BAD_HANDLE:
s = "CURLM_BAD_HANDLE";
break;
case CURLM_BAD_EASY_HANDLE:
s = "CURLM_BAD_EASY_HANDLE";
break;
case CURLM_OUT_OF_MEMORY:
s = "CURLM_OUT_OF_MEMORY";
break;
case CURLM_INTERNAL_ERROR:
s = "CURLM_INTERNAL_ERROR";
break;
case CURLM_UNKNOWN_OPTION:
s = "CURLM_UNKNOWN_OPTION";
break;
case CURLM_LAST:
s = "CURLM_LAST";
break;
default:
s = "CURLM_unknown";
break;
case CURLM_BAD_SOCKET:
s = "CURLM_BAD_SOCKET";
fprintf(MSG_OUT, "\nERROR: %s returns %s", where, s);
/* ignore this error */
return;
}
fprintf(MSG_OUT, "\nERROR: %s returns %s", where, s);
exit(code);
}
}
/* Check for completed transfers, and remove their easy handles */
static void check_multi_info(GlobalInfo *g)
{
char *eff_url;
CURLMsg *msg;
int msgs_left;
ConnInfo *conn;
CURL *easy;
CURLcode res;
fprintf(MSG_OUT, "\nREMAINING: %d", g->still_running);
while((msg = curl_multi_info_read(g->multi, &msgs_left)))
{
if(msg->msg == CURLMSG_DONE)
{
easy = msg->easy_handle;
res = msg->data.result;
curl_easy_getinfo(easy, CURLINFO_PRIVATE, &conn);
curl_easy_getinfo(easy, CURLINFO_EFFECTIVE_URL, &eff_url);
fprintf(MSG_OUT, "\nDONE: %s => (%d) %s", eff_url, res, conn->error);
curl_multi_remove_handle(g->multi, easy);
free(conn->url);
curl_easy_cleanup(easy);
free(conn);
}
}
}
/* Called by asio when there is an action on a socket */
static void event_cb(GlobalInfo *g, boost::asio::ip::tcp::socket *tcp_socket,
int action)
{
fprintf(MSG_OUT, "\nevent_cb: action=%d", action);
CURLMcode rc;
rc = curl_multi_socket_action(g->multi, tcp_socket->native_handle(), action,
&g->still_running);
mcode_or_die("event_cb: curl_multi_socket_action", rc);
check_multi_info(g);
if(g->still_running <= 0)
{
fprintf(MSG_OUT, "\nlast transfer done, kill timeout");
timer.cancel();
}
}
/* Called by asio when our timeout expires */
static void timer_cb(const boost::system::error_code & error, GlobalInfo *g)
{
if(!error)
{
fprintf(MSG_OUT, "\ntimer_cb: ");
CURLMcode rc;
rc = curl_multi_socket_action(g->multi, CURL_SOCKET_TIMEOUT, 0, &g->still_running);
mcode_or_die("timer_cb: curl_multi_socket_action", rc);
check_multi_info(g);
}
}
/* Clean up any data */
static void remsock(int *f, GlobalInfo *g)
{
fprintf(MSG_OUT, "\nremsock: ");
if(f)
{
free(f);
}
}
static void setsock(int *fdp, curl_socket_t s, CURL*e, int act, GlobalInfo*g)
{
fprintf(MSG_OUT, "\nsetsock: socket=%d, act=%d, fdp=%p", s, act, fdp);
std::map<curl_socket_t, boost::asio::ip::tcp::socket *>::iterator it = socket_map.find(s);
if(it == socket_map.end())
{
fprintf(MSG_OUT, "\nsocket %d is a c-ares socket, ignoring", s);
return;
}
boost::asio::ip::tcp::socket * tcp_socket = it->second;
*fdp = act;
if(act == CURL_POLL_IN)
{
fprintf(MSG_OUT, "\nwatching for socket to become readable");
tcp_socket->async_read_some(boost::asio::null_buffers(),
boost::bind(&event_cb, g, tcp_socket, act));
}
else if (act == CURL_POLL_OUT)
{
fprintf(MSG_OUT, "\nwatching for socket to become writable");
tcp_socket->async_write_some(boost::asio::null_buffers(),
boost::bind(&event_cb, g, tcp_socket, act));
}
else if(act == CURL_POLL_INOUT)
{
fprintf(MSG_OUT, "\nwatching for socket to become readable & writable");
tcp_socket->async_read_some(boost::asio::null_buffers(),
boost::bind(&event_cb, g, tcp_socket, act));
tcp_socket->async_write_some(boost::asio::null_buffers(),
boost::bind(&event_cb, g, tcp_socket, act));
}
}
static void addsock(curl_socket_t s, CURL *easy, int action, GlobalInfo *g)
{
/* fdp is used to store current action */
int *fdp = (int *) calloc(sizeof(int), 1);
setsock(fdp, s, easy, action, g);
curl_multi_assign(g->multi, s, fdp);
}
/* CURLMOPT_SOCKETFUNCTION */
static int sock_cb(CURL *e, curl_socket_t s, int what, void *cbp, void *sockp)
{
fprintf(MSG_OUT, "\nsock_cb: socket=%d, what=%d, sockp=%p", s, what, sockp);
GlobalInfo *g = (GlobalInfo*) cbp;
int *actionp = (int *) sockp;
const char *whatstr[] = { "none", "IN", "OUT", "INOUT", "REMOVE"};
fprintf(MSG_OUT,
"\nsocket callback: s=%d e=%p what=%s ", s, e, whatstr[what]);
if(what == CURL_POLL_REMOVE)
{
fprintf(MSG_OUT, "\n");
remsock(actionp, g);
}
else
{
if(!actionp)
{
fprintf(MSG_OUT, "\nAdding data: %s", whatstr[what]);
addsock(s, e, what, g);
}
else
{
fprintf(MSG_OUT,
"\nChanging action from %s to %s",
whatstr[*actionp], whatstr[what]);
setsock(actionp, s, e, what, g);
}
}
return 0;
}
/* CURLOPT_WRITEFUNCTION */
static size_t write_cb(void *ptr, size_t size, size_t nmemb, void *data)
{
size_t written = size * nmemb;
char* pBuffer = (char *) malloc(written + 1);
strncpy(pBuffer, (const char *)ptr, written);
pBuffer[written] = '\0';
fprintf(MSG_OUT, "%s", pBuffer);
free(pBuffer);
return written;
}
/* CURLOPT_PROGRESSFUNCTION */
static int prog_cb(void *p, double dltotal, double dlnow, double ult,
double uln)
{
ConnInfo *conn = (ConnInfo *)p;
(void)ult;
(void)uln;
fprintf(MSG_OUT, "\nProgress: %s (%g/%g)", conn->url, dlnow, dltotal);
fprintf(MSG_OUT, "\nProgress: %s (%g)", conn->url, ult);
return 0;
}
/* CURLOPT_OPENSOCKETFUNCTION */
static curl_socket_t opensocket(void *clientp, curlsocktype purpose,
struct curl_sockaddr *address)
{
fprintf(MSG_OUT, "\nopensocket :");
curl_socket_t sockfd = CURL_SOCKET_BAD;
/* restrict to IPv4 */
if(purpose == CURLSOCKTYPE_IPCXN && address->family == AF_INET)
{
/* create a tcp socket object */
boost::asio::ip::tcp::socket *tcp_socket = new boost::asio::ip::tcp::socket(io_service);
/* open it and get the native handle*/
boost::system::error_code ec;
tcp_socket->open(boost::asio::ip::tcp::v4(), ec);
if(ec)
{
/* An error occurred */
std::cout << std::endl << "Couldn't open socket [" << ec << "][" << ec.message() << "]";
fprintf(MSG_OUT, "\nERROR: Returning CURL_SOCKET_BAD to signal error");
}
else
{
sockfd = tcp_socket->native_handle();
fprintf(MSG_OUT, "\nOpened socket %d", sockfd);
/* save it for monitoring */
socket_map.insert(std::pair<curl_socket_t, boost::asio::ip::tcp::socket *>(sockfd, tcp_socket));
}
}
return sockfd;
}
/* CURLOPT_CLOSESOCKETFUNCTION */
static int close_socket(void *clientp, curl_socket_t item)
{
fprintf(MSG_OUT, "\nclose_socket : %d", item);
std::map<curl_socket_t, boost::asio::ip::tcp::socket *>::iterator it = socket_map.find(item);
if(it != socket_map.end())
{
delete it->second;
socket_map.erase(it);
}
return 0;
}
/* Create a new easy handle, and add it to the global curl_multi */
static void new_conn(char *url, GlobalInfo *g)
{
ConnInfo *conn;
CURLMcode rc;
conn = (ConnInfo *) calloc(1, sizeof(ConnInfo));
conn->easy = curl_easy_init();
if(!conn->easy)
{
fprintf(MSG_OUT, "\ncurl_easy_init() failed, exiting!");
exit(2);
}
conn->global = g;
conn->url = strdup(url);
curl_easy_setopt(conn->easy, CURLOPT_URL, conn->url);
curl_easy_setopt(conn->easy, CURLOPT_WRITEFUNCTION, write_cb);
curl_easy_setopt(conn->easy, CURLOPT_WRITEDATA, &conn);
curl_easy_setopt(conn->easy, CURLOPT_VERBOSE, 1L);
curl_easy_setopt(conn->easy, CURLOPT_ERRORBUFFER, conn->error);
curl_easy_setopt(conn->easy, CURLOPT_PRIVATE, conn);
curl_easy_setopt(conn->easy, CURLOPT_NOPROGRESS, 1L);
curl_easy_setopt(conn->easy, CURLOPT_PROGRESSFUNCTION, prog_cb);
curl_easy_setopt(conn->easy, CURLOPT_PROGRESSDATA, conn);
curl_easy_setopt(conn->easy, CURLOPT_LOW_SPEED_TIME, 3L);
curl_easy_setopt(conn->easy, CURLOPT_LOW_SPEED_LIMIT, 10L);
/* call this function to get a socket */
curl_easy_setopt(conn->easy, CURLOPT_OPENSOCKETFUNCTION, opensocket);
/* call this function to close a socket */
curl_easy_setopt(conn->easy, CURLOPT_CLOSESOCKETFUNCTION, close_socket);
fprintf(MSG_OUT,
"\nAdding easy %p to multi %p (%s)", conn->easy, g->multi, url);
rc = curl_multi_add_handle(g->multi, conn->easy);
mcode_or_die("new_conn: curl_multi_add_handle", rc);
/* note that the add_handle() will set a time-out to trigger very soon so
that the necessary socket_action() call will be called by this app */
}
int main(int argc, char **argv)
{
GlobalInfo g;
(void)argc;
(void)argv;
memset(&g, 0, sizeof(GlobalInfo));
g.multi = curl_multi_init();
curl_multi_setopt(g.multi, CURLMOPT_SOCKETFUNCTION, sock_cb);
curl_multi_setopt(g.multi, CURLMOPT_SOCKETDATA, &g);
curl_multi_setopt(g.multi, CURLMOPT_TIMERFUNCTION, multi_timer_cb);
curl_multi_setopt(g.multi, CURLMOPT_TIMERDATA, &g);
new_conn((char *)"www.google.com", &g); /* add a URL */
/* enter io_service run loop */
io_service.run();
curl_multi_cleanup(g.multi);
fprintf(MSG_OUT, "\ndone.\n");
return 0;
}

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,19 +19,25 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* CA cert in memory with OpenSSL to get a HTTPS page.
* </DESC>
/* Example using a "in core" PEM certificate to retrieve a https page.
* Written by Theo Borm
*/
/* on a netBSD system with OPENSSL& LIBCURL installed from
* pkgsrc (using default paths) this program can be compiled using:
* gcc -I/usr/pkg/include -L/usr/pkg/lib -lcurl -Wl,-R/usr/pkg/lib -lssl
* -lcrypto -lz -o curlcacerttest curlcacerttest.c
* on other operating systems you may want to change paths to headers
* and libraries
*/
#include <openssl/ssl.h>
#include <curl/curl.h>
#include <stdio.h>
size_t writefunction( void *ptr, size_t size, size_t nmemb, void *stream)
{
fwrite(ptr, size, nmemb, stream);
return (nmemb*size);
fwrite(ptr,size,nmemb,stream);
return(nmemb*size);
}
static CURLcode sslctx_function(CURL * curl, void * sslctx, void * parm)
@ -87,22 +93,18 @@ static CURLcode sslctx_function(CURL * curl, void * sslctx, void * parm)
* structure that SSL can use
*/
PEM_read_bio_X509(bio, &cert, 0, NULL);
if(cert == NULL)
if (cert == NULL)
printf("PEM_read_bio_X509 failed...\n");
/* get a pointer to the X509 certificate store (which may be empty!) */
store=SSL_CTX_get_cert_store((SSL_CTX *)sslctx);
/* add our certificate to this store */
if(X509_STORE_add_cert(store, cert)==0)
if (X509_STORE_add_cert(store, cert)==0)
printf("error adding certificate\n");
/* decrease reference counts */
X509_free(cert);
BIO_free(bio);
/* all set to go */
return CURLE_OK;
return CURLE_OK ;
}
int main(void)
@ -112,22 +114,22 @@ int main(void)
rv=curl_global_init(CURL_GLOBAL_ALL);
ch=curl_easy_init();
rv=curl_easy_setopt(ch, CURLOPT_VERBOSE, 0L);
rv=curl_easy_setopt(ch, CURLOPT_HEADER, 0L);
rv=curl_easy_setopt(ch, CURLOPT_NOPROGRESS, 1L);
rv=curl_easy_setopt(ch, CURLOPT_NOSIGNAL, 1L);
rv=curl_easy_setopt(ch, CURLOPT_WRITEFUNCTION, *writefunction);
rv=curl_easy_setopt(ch, CURLOPT_WRITEDATA, stdout);
rv=curl_easy_setopt(ch, CURLOPT_HEADERFUNCTION, *writefunction);
rv=curl_easy_setopt(ch, CURLOPT_HEADERDATA, stderr);
rv=curl_easy_setopt(ch, CURLOPT_SSLCERTTYPE, "PEM");
rv=curl_easy_setopt(ch, CURLOPT_SSL_VERIFYPEER, 1L);
rv=curl_easy_setopt(ch,CURLOPT_VERBOSE, 0L);
rv=curl_easy_setopt(ch,CURLOPT_HEADER, 0L);
rv=curl_easy_setopt(ch,CURLOPT_NOPROGRESS, 1L);
rv=curl_easy_setopt(ch,CURLOPT_NOSIGNAL, 1L);
rv=curl_easy_setopt(ch,CURLOPT_WRITEFUNCTION, *writefunction);
rv=curl_easy_setopt(ch,CURLOPT_WRITEDATA, stdout);
rv=curl_easy_setopt(ch,CURLOPT_HEADERFUNCTION, *writefunction);
rv=curl_easy_setopt(ch,CURLOPT_WRITEHEADER, stderr);
rv=curl_easy_setopt(ch,CURLOPT_SSLCERTTYPE,"PEM");
rv=curl_easy_setopt(ch,CURLOPT_SSL_VERIFYPEER,1L);
rv=curl_easy_setopt(ch, CURLOPT_URL, "https://www.example.com/");
/* first try: retrieve page without cacerts' certificate -> will fail
*/
rv=curl_easy_perform(ch);
if(rv==CURLE_OK)
if (rv==CURLE_OK)
printf("*** transfer succeeded ***\n");
else
printf("*** transfer failed ***\n");
@ -136,9 +138,9 @@ int main(void)
* load the certificate by installing a function doing the necessary
* "modifications" to the SSL CONTEXT just before link init
*/
rv=curl_easy_setopt(ch, CURLOPT_SSL_CTX_FUNCTION, *sslctx_function);
rv=curl_easy_setopt(ch,CURLOPT_SSL_CTX_FUNCTION, *sslctx_function);
rv=curl_easy_perform(ch);
if(rv==CURLE_OK)
if (rv==CURLE_OK)
printf("*** transfer succeeded ***\n");
else
printf("*** transfer failed ***\n");

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,13 +19,11 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* Extract lots of TLS certificate info.
* </DESC>
*/
#include <stdio.h>
#include <curl/curl.h>
#include <curl/types.h>
#include <curl/easy.h>
static size_t wrfu(void *ptr, size_t size, size_t nmemb, void *stream)
{
@ -56,24 +54,18 @@ int main(void)
res = curl_easy_perform(curl);
if(!res) {
union {
struct curl_slist *to_info;
struct curl_certinfo *to_certinfo;
} ptr;
struct curl_certinfo *ci = NULL;
ptr.to_info = NULL;
res = curl_easy_getinfo(curl, CURLINFO_CERTINFO, &ci);
res = curl_easy_getinfo(curl, CURLINFO_CERTINFO, &ptr.to_info);
if(!res && ptr.to_info) {
if(!res && ci) {
int i;
printf("%d certs!\n", ci->num_of_certs);
printf("%d certs!\n", ptr.to_certinfo->num_of_certs);
for(i = 0; i < ptr.to_certinfo->num_of_certs; i++) {
for(i=0; i<ci->num_of_certs; i++) {
struct curl_slist *slist;
for(slist = ptr.to_certinfo->certinfo[i]; slist; slist = slist->next)
for(slist = ci->certinfo[i]; slist; slist = slist->next)
printf("%s\n", slist->data);
}
@ -81,6 +73,7 @@ int main(void)
}
curl_easy_cleanup(curl);
}
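The hunks above only show how the certificate data is read back after the
transfer. For context, a hedged sketch of the options typically set before
curl_easy_perform() so that libcurl collects the information at all (the
helper name and URL are placeholders):

#include <curl/curl.h>

/* sketch: ask libcurl to gather the connection's certificate chain */
static void enable_certinfo(CURL *curl)
{
  /* placeholder URL */
  curl_easy_setopt(curl, CURLOPT_URL, "https://www.example.com/");
  curl_easy_setopt(curl, CURLOPT_CERTINFO, 1L);
}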

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,6 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* Show transfer timing info after download completes.
* </DESC>
*/
/* Example source code to show how the callback function can be used to
* download data into a chunk of memory instead of storing it in a file.
* After successful download we use curl_easy_getinfo() calls to get the
@ -39,6 +35,8 @@
#include <time.h>
#include <curl/curl.h>
#include <curl/types.h>
#include <curl/easy.h>
#define URL_BASE "http://speedtest.your.domain/"
#define URL_1M URL_BASE "file_1M.bin"
@ -64,82 +62,65 @@ int main(int argc, char *argv[])
{
CURL *curl_handle;
CURLcode res;
int prtall = 0, prtsep = 0, prttime = 0;
int prtsep = 0, prttime = 0;
const char *url = URL_1M;
char *appname = argv[0];
if(argc > 1) {
if (argc > 1) {
/* parse input parameters */
for(argc--, argv++; *argv; argc--, argv++) {
if(strncasecmp(*argv, "-", 1) == 0) {
if(strncasecmp(*argv, "-H", 2) == 0) {
for (argc--, argv++; *argv; argc--, argv++) {
if (strncasecmp(*argv, "-", 1) == 0) {
if (strncasecmp(*argv, "-H", 2) == 0) {
fprintf(stderr,
"\rUsage: %s [-m=1|2|5|10|20|50|100] [-t] [-x] [url]\n",
appname);
exit(1);
}
else if(strncasecmp(*argv, "-V", 2) == 0) {
} else if (strncasecmp(*argv, "-V", 2) == 0) {
fprintf(stderr, "\r%s %s - %s\n",
appname, CHKSPEED_VERSION, curl_version());
exit(1);
}
else if(strncasecmp(*argv, "-A", 2) == 0) {
prtall = 1;
}
else if(strncasecmp(*argv, "-X", 2) == 0) {
} else if (strncasecmp(*argv, "-X", 2) == 0) {
prtsep = 1;
}
else if(strncasecmp(*argv, "-T", 2) == 0) {
} else if (strncasecmp(*argv, "-T", 2) == 0) {
prttime = 1;
}
else if(strncasecmp(*argv, "-M=", 3) == 0) {
} else if (strncasecmp(*argv, "-M=", 3) == 0) {
long m = strtol((*argv)+3, NULL, 10);
switch(m) {
case 1:
url = URL_1M;
break;
case 2:
url = URL_2M;
break;
case 5:
url = URL_5M;
break;
case 10:
url = URL_10M;
break;
case 20:
url = URL_20M;
break;
case 50:
url = URL_50M;
break;
case 100:
url = URL_100M;
break;
default:
fprintf(stderr, "\r%s: invalid parameter %s\n",
appname, *argv + 3);
exit(1);
case 1: url = URL_1M;
break;
case 2: url = URL_2M;
break;
case 5: url = URL_5M;
break;
case 10: url = URL_10M;
break;
case 20: url = URL_20M;
break;
case 50: url = URL_50M;
break;
case 100: url = URL_100M;
break;
default: fprintf(stderr, "\r%s: invalid parameter %s\n",
appname, *argv + 3);
exit(1);
}
}
else {
} else {
fprintf(stderr, "\r%s: invalid or unknown option %s\n",
appname, *argv);
exit(1);
}
}
else {
} else {
url = *argv;
}
}
}
/* print separator line */
if(prtsep) {
if (prtsep) {
printf("-------------------------------------------------\n");
}
/* print localtime */
if(prttime) {
if (prttime) {
time_t t = time(NULL);
printf("Localtime: %s", ctime(&t));
}
@ -182,19 +163,7 @@ int main(int argc, char *argv[])
if((CURLE_OK == res) && (val>0))
printf("Average download speed: %0.3f kbyte/sec.\n", val / 1024);
if(prtall) {
/* check for name resolution time */
res = curl_easy_getinfo(curl_handle, CURLINFO_NAMELOOKUP_TIME, &val);
if((CURLE_OK == res) && (val>0))
printf("Name lookup time: %0.3f sec.\n", val);
/* check for connect time */
res = curl_easy_getinfo(curl_handle, CURLINFO_CONNECT_TIME, &val);
if((CURLE_OK == res) && (val>0))
printf("Connect time: %0.3f sec.\n", val);
}
}
else {
} else {
fprintf(stderr, "Error while fetching '%s' : %s\n",
url, curl_easy_strerror(res));
}
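As a companion to the speed and size getters used above, a hedged sketch of
the same curl_easy_getinfo() pattern applied to two more statistics (the
helper name is a placeholder):

#include <stdio.h>
#include <curl/curl.h>

/* sketch: print extra statistics for a finished transfer */
static void print_more_stats(CURL *curl_handle)
{
  double val;

  /* total time of the previous transfer, in seconds */
  if((curl_easy_getinfo(curl_handle, CURLINFO_TOTAL_TIME, &val) == CURLE_OK)
     && (val > 0))
    printf("Total transfer time: %0.3f sec.\n", val);

  /* number of bytes downloaded */
  if((curl_easy_getinfo(curl_handle, CURLINFO_SIZE_DOWNLOAD, &val) == CURLE_OK)
     && (val > 0))
    printf("Downloaded: %0.0f bytes\n", val);
}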

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,7 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* Import and export cookies with COOKIELIST.
* </DESC>
*/
/* This example shows usage of simple cookie interface. */
#include <stdio.h>
#include <string.h>
@ -42,18 +39,17 @@ print_cookies(CURL *curl)
printf("Cookies, curl knows:\n");
res = curl_easy_getinfo(curl, CURLINFO_COOKIELIST, &cookies);
if(res != CURLE_OK) {
fprintf(stderr, "Curl curl_easy_getinfo failed: %s\n",
curl_easy_strerror(res));
if (res != CURLE_OK) {
fprintf(stderr, "Curl curl_easy_getinfo failed: %s\n", curl_easy_strerror(res));
exit(1);
}
nc = cookies, i = 1;
while(nc) {
while (nc) {
printf("[%d]: %s\n", i, nc->data);
nc = nc->next;
i++;
}
if(i == 1) {
if (i == 1) {
printf("(none)\n");
}
curl_slist_free_all(cookies);
@ -67,14 +63,14 @@ main(void)
curl_global_init(CURL_GLOBAL_ALL);
curl = curl_easy_init();
if(curl) {
if (curl) {
char nline[256];
curl_easy_setopt(curl, CURLOPT_URL, "http://www.example.com/");
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
curl_easy_setopt(curl, CURLOPT_COOKIEFILE, ""); /* start cookie engine */
curl_easy_setopt(curl, CURLOPT_COOKIEFILE, ""); /* just to start the cookie engine */
res = curl_easy_perform(curl);
if(res != CURLE_OK) {
if (res != CURLE_OK) {
fprintf(stderr, "Curl perform failed: %s\n", curl_easy_strerror(res));
return 1;
}
@ -93,41 +89,30 @@ main(void)
#endif
/* Netscape format cookie */
snprintf(nline, sizeof(nline), "%s\t%s\t%s\t%s\t%lu\t%s\t%s",
".google.com", "TRUE", "/", "FALSE",
(unsigned long)time(NULL) + 31337UL,
"PREF", "hello google, i like you very much!");
".google.com", "TRUE", "/", "FALSE", time(NULL) + 31337, "PREF", "hello google, i like you very much!");
res = curl_easy_setopt(curl, CURLOPT_COOKIELIST, nline);
if(res != CURLE_OK) {
fprintf(stderr, "Curl curl_easy_setopt failed: %s\n",
curl_easy_strerror(res));
if (res != CURLE_OK) {
fprintf(stderr, "Curl curl_easy_setopt failed: %s\n", curl_easy_strerror(res));
return 1;
}
/* HTTP-header style cookie. If you use the Set-Cookie format and don't
specify a domain then the cookie is sent for any domain and will not be
modified, likely not what you intended. Starting in 7.43.0 any-domain
cookies will not be exported either. For more information refer to the
CURLOPT_COOKIELIST documentation.
*/
/* HTTP-header style cookie */
snprintf(nline, sizeof(nline),
"Set-Cookie: OLD_PREF=3d141414bf4209321; "
"expires=Sun, 17-Jan-2038 19:14:07 GMT; path=/; domain=.google.com");
res = curl_easy_setopt(curl, CURLOPT_COOKIELIST, nline);
if(res != CURLE_OK) {
fprintf(stderr, "Curl curl_easy_setopt failed: %s\n",
curl_easy_strerror(res));
if (res != CURLE_OK) {
fprintf(stderr, "Curl curl_easy_setopt failed: %s\n", curl_easy_strerror(res));
return 1;
}
print_cookies(curl);
res = curl_easy_perform(curl);
if(res != CURLE_OK) {
if (res != CURLE_OK) {
fprintf(stderr, "Curl perform failed: %s\n", curl_easy_strerror(res));
return 1;
}
curl_easy_cleanup(curl);
}
else {
fprintf(stderr, "Curl init failed!\n");

View File

@ -5,17 +5,16 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (c) 2000 David Odin (aka DindinX) for MandrakeSoft
*/
/* <DESC>
* use libcurl in a gtk-threaded application
* </DESC>
*/
/* Copyright (c) 2000 David Odin (aka DindinX) for MandrakeSoft */
/* an attempt to use the curl library in concert with a gtk-threaded application */
#include <stdio.h>
#include <gtk/gtk.h>
#include <curl/curl.h>
#include <curl/types.h> /* new for v7 */
#include <curl/easy.h> /* new for v7 */
GtkWidget *Bar;
@ -50,9 +49,9 @@ void *my_thread(void *ptr)
gchar *url = ptr;
curl = curl_easy_init();
if(curl) {
const char *filename = "test.curl";
outfile = fopen(filename, "wb");
if(curl)
{
outfile = fopen("test.curl", "w");
curl_easy_setopt(curl, CURLOPT_URL, url);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, outfile);
@ -97,7 +96,7 @@ int main(int argc, char **argv)
gtk_container_add(GTK_CONTAINER(Frame2), Bar);
gtk_widget_show_all(Window);
if(!g_thread_create(&my_thread, argv[1], FALSE, NULL) != 0)
if (!g_thread_create(&my_thread, argv[1], FALSE, NULL) != 0)
g_warning("can't create the thread");
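curlgtk.c drives a GTK progress bar from libcurl's progress callback, which
is not part of the hunks shown. A hedged sketch of the classic progress
callback shape such an application relies on (the function name is a
placeholder; printing a fraction stands in for updating the widget passed via
CURLOPT_PROGRESSDATA):

#include <stdio.h>
#include <curl/curl.h>

/* sketch: classic (double-based) progress callback */
static int my_progress(void *clientp, double dltotal, double dlnow,
                       double ultotal, double ulnow)
{
  (void)clientp;
  (void)ultotal;
  (void)ulnow;
  if(dltotal > 0.0)
    fprintf(stderr, "fraction done: %g\n", dlnow / dltotal);
  return 0; /* a non-zero return would abort the transfer */
}

/* registered with:
   curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0L);
   curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, my_progress); */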

View File

@ -9,10 +9,7 @@
certificate presented during ssl session establishment.
*/
/* <DESC>
* demonstrates use of SSL context callback, requires OpenSSL
* </DESC>
*/
/*
* Copyright (c) 2003 The OpenEvidence Project. All rights reserved.
@ -36,7 +33,7 @@
* "This product includes software developed by the Openevidence Project
* for use in the OpenEvidence Toolkit. (http://www.openevidence.org/)"
* This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (https://www.openssl.org/)"
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
* This product includes cryptographic software written by Eric Young
* (eay@cryptsoft.com). This product includes software written by Tim
* Hudson (tjh@cryptsoft.com)."
@ -55,7 +52,7 @@
* "This product includes software developed by the OpenEvidence Project
* for use in the OpenEvidence Toolkit (http://www.openevidence.org/)
* This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (https://www.openssl.org/)"
* for use in the OpenSSL Toolkit (http://www.openssl.org/)"
* This product includes cryptographic software written by Eric Young
* (eay@cryptsoft.com). This product includes software written by Tim
* Hudson (tjh@cryptsoft.com)."
@ -75,7 +72,7 @@
* ====================================================================
*
* This product includes software developed by the OpenSSL Project
* for use in the OpenSSL Toolkit (https://www.openssl.org/)
* for use in the OpenSSL Toolkit (http://www.openssl.org/)
* This product includes cryptographic software written by Eric Young
* (eay@cryptsoft.com). This product includes software written by Tim
* Hudson (tjh@cryptsoft.com).
@ -101,18 +98,13 @@
static const char *curlx_usage[]={
"usage: curlx args\n",
" -p12 arg - tia file ",
" -envpass arg - environement variable which content the tia private"
" key password",
" -envpass arg - environement variable which content the tia private key password",
" -out arg - output file (response)- default stdout",
" -in arg - input file (request)- default stdin",
" -connect arg - URL of the server for the connection ex:"
" www.openevidence.org",
" -mimetype arg - MIME type for data in ex : application/timestamp-query"
" or application/dvcs -default application/timestamp-query",
" -acceptmime arg - MIME type acceptable for the response ex : "
"application/timestamp-response or application/dvcs -default none",
" -accesstype arg - an Object identifier in an AIA/SIA method, e.g."
" AD_DVCS or ad_timestamping",
" -connect arg - URL of the server for the connection ex: www.openevidence.org",
" -mimetype arg - MIME type for data in ex : application/timestamp-query or application/dvcs -default application/timestamp-query",
" -acceptmime arg - MIME type acceptable for the response ex : application/timestamp-response or application/dvcs -default none",
" -accesstype arg - an Object identifier in an AIA/SIA method, e.g. AD_DVCS or ad_timestamping",
NULL
};
@ -133,22 +125,22 @@ static const char *curlx_usage[]={
/* This is a context that we pass to all callbacks */
typedef struct sslctxparm_st {
unsigned char * p12file;
const char * pst;
PKCS12 * p12;
EVP_PKEY * pkey;
X509 * usercert;
STACK_OF(X509) * ca;
unsigned char * p12file ;
const char * pst ;
PKCS12 * p12 ;
EVP_PKEY * pkey ;
X509 * usercert ;
STACK_OF(X509) * ca ;
CURL * curl;
BIO * errorbio;
int accesstype;
int accesstype ;
int verbose;
} sslctxparm;
/* some helper function. */
static char *ia5string(ASN1_IA5STRING *ia5)
static char *i2s_ASN1_IA5STRING( ASN1_IA5STRING *ia5)
{
char *tmp;
if(!ia5 || !ia5->length)
@ -160,20 +152,20 @@ static char *ia5string(ASN1_IA5STRING *ia5)
}
/* A convenience routine to get an access URI. */
static unsigned char *my_get_ext(X509 *cert, const int type,
int extensiontype)
{
int i;
STACK_OF(ACCESS_DESCRIPTION) * accessinfo;
accessinfo = X509_get_ext_d2i(cert, extensiontype, NULL, NULL);
if(!sk_ACCESS_DESCRIPTION_num(accessinfo))
static unsigned char *my_get_ext(X509 * cert, const int type, int extensiontype) {
int i;
STACK_OF(ACCESS_DESCRIPTION) * accessinfo ;
accessinfo = X509_get_ext_d2i(cert, extensiontype, NULL, NULL) ;
if (!sk_ACCESS_DESCRIPTION_num(accessinfo))
return NULL;
for(i = 0; i < sk_ACCESS_DESCRIPTION_num(accessinfo); i++) {
for (i = 0; i < sk_ACCESS_DESCRIPTION_num(accessinfo); i++) {
ACCESS_DESCRIPTION * ad = sk_ACCESS_DESCRIPTION_value(accessinfo, i);
if(OBJ_obj2nid(ad->method) == type) {
if(ad->location->type == GEN_URI) {
return ia5string(ad->location->d.ia5);
if (OBJ_obj2nid(ad->method) == type) {
if (ad->location->type == GEN_URI) {
return i2s_ASN1_IA5STRING(ad->location->d.ia5);
}
return NULL;
}
@ -192,86 +184,85 @@ static int ssl_app_verify_callback(X509_STORE_CTX *ctx, void *arg)
sslctxparm * p = (sslctxparm *) arg;
int ok;
if(p->verbose > 2)
BIO_printf(p->errorbio, "entering ssl_app_verify_callback\n");
if (p->verbose > 2)
BIO_printf(p->errorbio,"entering ssl_app_verify_callback\n");
if((ok= X509_verify_cert(ctx)) && ctx->cert) {
unsigned char * accessinfo;
if(p->verbose > 1)
X509_print_ex(p->errorbio, ctx->cert, 0, 0);
if ((ok= X509_verify_cert(ctx)) && ctx->cert) {
unsigned char * accessinfo ;
if (p->verbose > 1)
X509_print_ex(p->errorbio,ctx->cert,0,0);
if(accessinfo = my_get_ext(ctx->cert, p->accesstype, NID_sinfo_access)) {
if(p->verbose)
BIO_printf(p->errorbio, "Setting URL from SIA to: %s\n", accessinfo);
if (accessinfo = my_get_ext(ctx->cert,p->accesstype ,NID_sinfo_access)) {
if (p->verbose)
BIO_printf(p->errorbio,"Setting URL from SIA to: %s\n", accessinfo);
curl_easy_setopt(p->curl, CURLOPT_URL, accessinfo);
curl_easy_setopt(p->curl, CURLOPT_URL,accessinfo);
}
else if(accessinfo = my_get_ext(ctx->cert, p->accesstype,
NID_info_access)) {
if(p->verbose)
BIO_printf(p->errorbio, "Setting URL from AIA to: %s\n", accessinfo);
else if (accessinfo = my_get_ext(ctx->cert,p->accesstype,
NID_info_access)) {
if (p->verbose)
BIO_printf(p->errorbio,"Setting URL from AIA to: %s\n", accessinfo);
curl_easy_setopt(p->curl, CURLOPT_URL, accessinfo);
curl_easy_setopt(p->curl, CURLOPT_URL,accessinfo);
}
}
if(p->verbose > 2)
BIO_printf(p->errorbio, "leaving ssl_app_verify_callback with %d\n", ok);
return ok;
if (p->verbose > 2)
BIO_printf(p->errorbio,"leaving ssl_app_verify_callback with %d\n", ok);
return(ok);
}
/* The SSL initialisation callback. The callback sets:
/* This is an example of an curl SSL initialisation call back. The callback sets:
- a private key and certificate
- a trusted ca certificate
- a preferred cipherlist
- an application verification callback (the function above)
*/
static CURLcode sslctxfun(CURL * curl, void * sslctx, void * parm)
{
static CURLcode sslctxfun(CURL * curl, void * sslctx, void * parm) {
sslctxparm * p = (sslctxparm *) parm;
SSL_CTX * ctx = (SSL_CTX *) sslctx;
SSL_CTX * ctx = (SSL_CTX *) sslctx ;
if(!SSL_CTX_use_certificate(ctx, p->usercert)) {
BIO_printf(p->errorbio, "SSL_CTX_use_certificate problem\n");
goto err;
if (!SSL_CTX_use_certificate(ctx,p->usercert)) {
BIO_printf(p->errorbio, "SSL_CTX_use_certificate problem\n"); goto err;
}
if(!SSL_CTX_use_PrivateKey(ctx, p->pkey)) {
BIO_printf(p->errorbio, "SSL_CTX_use_PrivateKey\n");
goto err;
if (!SSL_CTX_use_PrivateKey(ctx,p->pkey)) {
BIO_printf(p->errorbio, "SSL_CTX_use_PrivateKey\n"); goto err;
}
if(!SSL_CTX_check_private_key(ctx)) {
BIO_printf(p->errorbio, "SSL_CTX_check_private_key\n");
goto err;
if (!SSL_CTX_check_private_key(ctx)) {
BIO_printf(p->errorbio, "SSL_CTX_check_private_key\n"); goto err;
}
SSL_CTX_set_quiet_shutdown(ctx, 1);
SSL_CTX_set_cipher_list(ctx, "RC4-MD5");
SSL_CTX_set_quiet_shutdown(ctx,1);
SSL_CTX_set_cipher_list(ctx,"RC4-MD5");
SSL_CTX_set_mode(ctx, SSL_MODE_AUTO_RETRY);
X509_STORE_add_cert(SSL_CTX_get_cert_store(ctx),
sk_X509_value(p->ca, sk_X509_num(p->ca)-1));
X509_STORE_add_cert(ctx->cert_store,sk_X509_value(p->ca,
sk_X509_num(p->ca)-1));
SSL_CTX_set_verify_depth(ctx,2);
SSL_CTX_set_verify(ctx,SSL_VERIFY_PEER,ZERO_NULL);
SSL_CTX_set_verify_depth(ctx, 2);
SSL_CTX_set_verify(ctx, SSL_VERIFY_PEER, ZERO_NULL);
SSL_CTX_set_cert_verify_callback(ctx, ssl_app_verify_callback, parm);
return CURLE_OK;
return CURLE_OK ;
err:
ERR_print_errors(p->errorbio);
return CURLE_SSL_CERTPROBLEM;
}
int main(int argc, char **argv)
{
int main(int argc, char **argv) {
BIO* in=NULL;
BIO* out=NULL;
char * outfile = NULL;
char * infile = NULL;
char * infile = NULL ;
int tabLength=100;
char *binaryptr;
@ -280,7 +271,7 @@ int main(int argc, char **argv)
char* contenttype;
const char** pp;
unsigned char* hostporturl = NULL;
BIO * p12bio;
BIO * p12bio ;
char **args = argv + 1;
unsigned char * serverurl;
sslctxparm p;
@ -303,91 +294,66 @@ int main(int argc, char **argv)
OpenSSL_add_all_digests();
ERR_load_crypto_strings();
while(*args && *args[0] == '-') {
if(!strcmp (*args, "-in")) {
if(args[1]) {
while (*args && *args[0] == '-') {
if (!strcmp (*args, "-in")) {
if (args[1]) {
infile=*(++args);
}
else
badarg=1;
}
else if(!strcmp (*args, "-out")) {
if(args[1]) {
} else badarg=1;
} else if (!strcmp (*args, "-out")) {
if (args[1]) {
outfile=*(++args);
}
else
badarg=1;
}
else if(!strcmp (*args, "-p12")) {
if(args[1]) {
} else badarg=1;
} else if (!strcmp (*args, "-p12")) {
if (args[1]) {
p.p12file = *(++args);
}
else
badarg=1;
}
else if(strcmp(*args, "-envpass") == 0) {
if(args[1]) {
} else badarg=1;
} else if (strcmp(*args,"-envpass") == 0) {
if (args[1]) {
p.pst = getenv(*(++args));
}
else
badarg=1;
}
else if(strcmp(*args, "-connect") == 0) {
if(args[1]) {
} else badarg=1;
} else if (strcmp(*args,"-connect") == 0) {
if (args[1]) {
hostporturl = *(++args);
}
else
badarg=1;
}
else if(strcmp(*args, "-mimetype") == 0) {
if(args[1]) {
} else badarg=1;
} else if (strcmp(*args,"-mimetype") == 0) {
if (args[1]) {
mimetype = *(++args);
}
else
badarg=1;
}
else if(strcmp(*args, "-acceptmime") == 0) {
if(args[1]) {
} else badarg=1;
} else if (strcmp(*args,"-acceptmime") == 0) {
if (args[1]) {
mimetypeaccept = *(++args);
}
else
badarg=1;
}
else if(strcmp(*args, "-accesstype") == 0) {
if(args[1]) {
if((p.accesstype = OBJ_obj2nid(OBJ_txt2obj(*++args, 0))) == 0)
badarg=1;
}
else
badarg=1;
}
else if(strcmp(*args, "-verbose") == 0) {
} else badarg=1;
} else if (strcmp(*args,"-accesstype") == 0) {
if (args[1]) {
if ((p.accesstype = OBJ_obj2nid(OBJ_txt2obj(*++args,0))) == 0) badarg=1;
} else badarg=1;
} else if (strcmp(*args,"-verbose") == 0) {
p.verbose++;
}
else
badarg=1;
} else badarg=1;
args++;
}
if(mimetype==NULL || mimetypeaccept == NULL)
badarg = 1;
if (mimetype==NULL || mimetypeaccept == NULL) badarg = 1;
if(badarg) {
for(pp=curlx_usage; (*pp != NULL); pp++)
BIO_printf(p.errorbio, "%s\n", *pp);
BIO_printf(p.errorbio, "\n");
if (badarg) {
for (pp=curlx_usage; (*pp != NULL); pp++)
BIO_printf(p.errorbio,"%s\n",*pp);
BIO_printf(p.errorbio,"\n");
goto err;
}
/* set input */
if((in=BIO_new(BIO_s_file())) == NULL) {
if ((in=BIO_new(BIO_s_file())) == NULL) {
BIO_printf(p.errorbio, "Error setting input bio\n");
goto err;
}
else if(infile == NULL)
BIO_set_fp(in, stdin, BIO_NOCLOSE|BIO_FP_TEXT);
else if(BIO_read_filename(in, infile) <= 0) {
} else if (infile == NULL)
BIO_set_fp(in,stdin,BIO_NOCLOSE|BIO_FP_TEXT);
else if (BIO_read_filename(in,infile) <= 0) {
BIO_printf(p.errorbio, "Error opening input file %s\n", infile);
BIO_free(in);
goto err;
@ -395,13 +361,12 @@ int main(int argc, char **argv)
/* set output */
if((out=BIO_new(BIO_s_file())) == NULL) {
if ((out=BIO_new(BIO_s_file())) == NULL) {
BIO_printf(p.errorbio, "Error setting output bio.\n");
goto err;
}
else if(outfile == NULL)
BIO_set_fp(out, stdout, BIO_NOCLOSE|BIO_FP_TEXT);
else if(BIO_write_filename(out, outfile) <= 0) {
} else if (outfile == NULL)
BIO_set_fp(out,stdout,BIO_NOCLOSE|BIO_FP_TEXT);
else if (BIO_write_filename(out,outfile) <= 0) {
BIO_printf(p.errorbio, "Error opening output file %s\n", outfile);
BIO_free(out);
goto err;
@ -410,66 +375,62 @@ int main(int argc, char **argv)
p.errorbio = BIO_new_fp (stderr, BIO_NOCLOSE);
if(!(p.curl = curl_easy_init())) {
if (!(p.curl = curl_easy_init())) {
BIO_printf(p.errorbio, "Cannot init curl lib\n");
goto err;
}
if(!(p12bio = BIO_new_file(p.p12file , "rb"))) {
BIO_printf(p.errorbio, "Error opening P12 file %s\n", p.p12file);
goto err;
if (!(p12bio = BIO_new_file(p.p12file , "rb"))) {
BIO_printf(p.errorbio, "Error opening P12 file %s\n", p.p12file); goto err;
}
if(!(p.p12 = d2i_PKCS12_bio (p12bio, NULL))) {
BIO_printf(p.errorbio, "Cannot decode P12 structure %s\n", p.p12file);
goto err;
if (!(p.p12 = d2i_PKCS12_bio (p12bio, NULL))) {
BIO_printf(p.errorbio, "Cannot decode P12 structure %s\n", p.p12file); goto err;
}
p.ca= NULL;
if(!(PKCS12_parse (p.p12, p.pst, &(p.pkey), &(p.usercert), &(p.ca) ) )) {
BIO_printf(p.errorbio, "Invalid P12 structure in %s\n", p.p12file);
goto err;
if (!(PKCS12_parse (p.p12, p.pst, &(p.pkey), &(p.usercert), &(p.ca) ) )) {
BIO_printf(p.errorbio,"Invalid P12 structure in %s\n", p.p12file); goto err;
}
if(sk_X509_num(p.ca) <= 0) {
BIO_printf(p.errorbio, "No trustworthy CA given.%s\n", p.p12file);
goto err;
if (sk_X509_num(p.ca) <= 0) {
BIO_printf(p.errorbio,"No trustworthy CA given.%s\n", p.p12file); goto err;
}
if(p.verbose > 1)
X509_print_ex(p.errorbio, p.usercert, 0, 0);
if (p.verbose > 1)
X509_print_ex(p.errorbio,p.usercert,0,0);
/* determine URL to go */
if(hostporturl) {
size_t len = strlen(hostporturl) + 9;
serverurl = malloc(len);
snprintf(serverurl, len, "https://%s", hostporturl);
if (hostporturl) {
serverurl = malloc(9+strlen(hostporturl));
sprintf(serverurl,"https://%s",hostporturl);
}
else if(p.accesstype != 0) { /* see whether we can find an AIA or SIA for a
given access type */
if(!(serverurl = my_get_ext(p.usercert, p.accesstype, NID_info_access))) {
else if (p.accesstype != 0) { /* see whether we can find an AIA or SIA for a given access type */
if (!(serverurl = my_get_ext(p.usercert,p.accesstype,NID_info_access))) {
int j=0;
BIO_printf(p.errorbio, "no service URL in user cert "
BIO_printf(p.errorbio,"no service URL in user cert "
"cherching in others certificats\n");
for(j=0; j<sk_X509_num(p.ca); j++) {
if((serverurl = my_get_ext(sk_X509_value(p.ca, j), p.accesstype,
for (j=0;j<sk_X509_num(p.ca);j++) {
if ((serverurl = my_get_ext(sk_X509_value(p.ca,j),p.accesstype,
NID_info_access)))
break;
if((serverurl = my_get_ext(sk_X509_value(p.ca, j), p.accesstype,
if ((serverurl = my_get_ext(sk_X509_value(p.ca,j),p.accesstype,
NID_sinfo_access)))
break;
}
}
}
if(!serverurl) {
if (!serverurl) {
BIO_printf(p.errorbio, "no service URL in certificats,"
" check '-accesstype (AD_DVCS | ad_timestamping)'"
" or use '-connect'\n");
goto err;
}
if(p.verbose)
if (p.verbose)
BIO_printf(p.errorbio, "Service URL: <%s>\n", serverurl);
curl_easy_setopt(p.curl, CURLOPT_URL, serverurl);
@ -477,39 +438,38 @@ int main(int argc, char **argv)
/* Now specify the POST binary data */
curl_easy_setopt(p.curl, CURLOPT_POSTFIELDS, binaryptr);
curl_easy_setopt(p.curl, CURLOPT_POSTFIELDSIZE, (long)tabLength);
curl_easy_setopt(p.curl, CURLOPT_POSTFIELDSIZE,(long)tabLength);
/* pass our list of custom made headers */
contenttype = malloc(15+strlen(mimetype));
snprintf(contenttype, 15+strlen(mimetype), "Content-type: %s", mimetype);
headers = curl_slist_append(headers, contenttype);
sprintf(contenttype,"Content-type: %s",mimetype);
headers = curl_slist_append(headers,contenttype);
curl_easy_setopt(p.curl, CURLOPT_HTTPHEADER, headers);
if(p.verbose)
if (p.verbose)
BIO_printf(p.errorbio, "Service URL: <%s>\n", serverurl);
{
FILE *outfp;
BIO_get_fp(out, &outfp);
BIO_get_fp(out,&outfp);
curl_easy_setopt(p.curl, CURLOPT_WRITEDATA, outfp);
}
res = curl_easy_setopt(p.curl, CURLOPT_SSL_CTX_FUNCTION, sslctxfun);
res = curl_easy_setopt(p.curl, CURLOPT_SSL_CTX_FUNCTION, sslctxfun) ;
if(res != CURLE_OK)
BIO_printf(p.errorbio, "%d %s=%d %d\n", __LINE__,
"CURLOPT_SSL_CTX_FUNCTION", CURLOPT_SSL_CTX_FUNCTION, res);
if (res != CURLE_OK)
BIO_printf(p.errorbio,"%d %s=%d %d\n", __LINE__, "CURLOPT_SSL_CTX_FUNCTION",CURLOPT_SSL_CTX_FUNCTION,res);
curl_easy_setopt(p.curl, CURLOPT_SSL_CTX_DATA, &p);
{
int lu; int i=0;
while((lu = BIO_read (in, &binaryptr[i], tabLength-i)) >0 ) {
while ((lu = BIO_read (in,&binaryptr[i],tabLength-i)) >0 ) {
i+=lu;
if(i== tabLength) {
if (i== tabLength) {
tabLength+=100;
binaryptr=realloc(binaryptr, tabLength); /* should be more careful */
binaryptr=realloc(binaryptr,tabLength); /* should be more careful */
}
}
tabLength = i;
@ -517,23 +477,23 @@ int main(int argc, char **argv)
/* Now specify the POST binary data */
curl_easy_setopt(p.curl, CURLOPT_POSTFIELDS, binaryptr);
curl_easy_setopt(p.curl, CURLOPT_POSTFIELDSIZE, (long)tabLength);
curl_easy_setopt(p.curl, CURLOPT_POSTFIELDSIZE,(long)tabLength);
/* Perform the request, res will get the return code */
BIO_printf(p.errorbio, "%d %s %d\n", __LINE__, "curl_easy_perform",
BIO_printf(p.errorbio,"%d %s %d\n", __LINE__, "curl_easy_perform",
res = curl_easy_perform(p.curl));
{
int result =curl_easy_getinfo(p.curl, CURLINFO_CONTENT_TYPE, &response);
if(mimetypeaccept && p.verbose)
if(!strcmp(mimetypeaccept, response))
BIO_printf(p.errorbio, "the response has a correct mimetype : %s\n",
int result =curl_easy_getinfo(p.curl,CURLINFO_CONTENT_TYPE,&response);
if( mimetypeaccept && p.verbose)
if(!strcmp(mimetypeaccept,response))
BIO_printf(p.errorbio,"the response has a correct mimetype : %s\n",
response);
else
BIO_printf(p.errorbio, "the response doesn\'t have an acceptable "
BIO_printf(p.errorbio,"the reponse doesn\'t has an acceptable "
"mime type, it is %s instead of %s\n",
response, mimetypeaccept);
response,mimetypeaccept);
}
/*** error code if the acceptable mime type does not match ***, likewise for an HTTP return code != 200 ***/
@ -549,6 +509,6 @@ int main(int argc, char **argv)
BIO_free(out);
return (EXIT_SUCCESS);
err: BIO_printf(p.errorbio, "error");
err: BIO_printf(p.errorbio,"error");
exit(1);
}
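The POST setup above appends a Content-type header to a curl_slist, but the
hunks never show the list being released. As a reminder, a hedged sketch of
the usual lifetime of such a header list (the helper name is a placeholder):

#include <stdio.h>
#include <curl/curl.h>

/* sketch: build and attach a custom header list */
static void set_content_type(CURL *curl, struct curl_slist **headers,
                             const char *mimetype)
{
  char buf[128];
  snprintf(buf, sizeof(buf), "Content-type: %s", mimetype);
  *headers = curl_slist_append(*headers, buf); /* the string is copied */
  curl_easy_setopt(curl, CURLOPT_HTTPHEADER, *headers);
}

/* after the transfer has finished:
   curl_easy_setopt(curl, CURLOPT_HTTPHEADER, NULL);
   curl_slist_free_all(headers); */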

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,6 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* Show how CURLOPT_DEBUGFUNCTION can be used.
* </DESC>
*/
#include <stdio.h>
#include <curl/curl.h>
@ -62,14 +58,14 @@ void dump(const char *text,
for(c = 0; (c < width) && (i+c < size); c++) {
/* check for 0D0A; if found, skip past and start a new line of output */
if(nohex && (i+c+1 < size) && ptr[i+c]==0x0D && ptr[i+c+1]==0x0A) {
if (nohex && (i+c+1 < size) && ptr[i+c]==0x0D && ptr[i+c+1]==0x0A) {
i+=(c+2-width);
break;
}
fprintf(stream, "%c",
(ptr[i+c]>=0x20) && (ptr[i+c]<0x80)?ptr[i+c]:'.');
/* check again for 0D0A, to avoid an extra \n if it's at width */
if(nohex && (i+c+2 < size) && ptr[i+c+1]==0x0D && ptr[i+c+2]==0x0A) {
if (nohex && (i+c+2 < size) && ptr[i+c+1]==0x0D && ptr[i+c+2]==0x0A) {
i+=(c+3-width);
break;
}
@ -134,15 +130,8 @@ int main(void)
/* the DEBUGFUNCTION has no effect until we enable VERBOSE */
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
/* example.com is redirected, so we tell libcurl to follow redirection */
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
curl_easy_setopt(curl, CURLOPT_URL, "http://example.com/");
res = curl_easy_perform(curl);
/* Check for errors */
if(res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
/* always cleanup */
curl_easy_cleanup(curl);
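The hunks above show the hex/ascii dump helper and the VERBOSE switch, but
not the debug callback registration itself. A hedged sketch of the minimal
CURLOPT_DEBUGFUNCTION shape (the function name is a placeholder; only
informational text is printed here):

#include <stdio.h>
#include <curl/curl.h>

/* sketch: minimal debug callback, must return 0 */
static int my_trace(CURL *handle, curl_infotype type,
                    char *data, size_t size, void *userp)
{
  (void)handle;
  (void)userp;
  if(type == CURLINFO_TEXT)
    fprintf(stderr, "== Info: %.*s", (int)size, data);
  return 0;
}

/* registered before curl_easy_perform():
   curl_easy_setopt(curl, CURLOPT_DEBUGFUNCTION, my_trace);
   curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L); */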

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,6 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* multi socket interface together with libev
* </DESC>
*/
/* Example application source code using the multi socket interface to
* download many files at once.
*
@ -119,12 +115,12 @@ static int multi_timer_cb(CURLM *multi, long timeout_ms, GlobalInfo *g)
{
DPRINT("%s %li\n", __PRETTY_FUNCTION__, timeout_ms);
ev_timer_stop(g->loop, &g->timer_event);
if(timeout_ms > 0) {
if (timeout_ms > 0)
{
double t = timeout_ms / 1000;
ev_timer_init(&g->timer_event, timer_cb, t, 0.);
ev_timer_start(g->loop, &g->timer_event);
}
else
}else
timer_cb(g->loop, &g->timer_event, 0);
return 0;
}
@ -132,32 +128,21 @@ static int multi_timer_cb(CURLM *multi, long timeout_ms, GlobalInfo *g)
/* Die if we get a bad CURLMcode somewhere */
static void mcode_or_die(const char *where, CURLMcode code)
{
if(CURLM_OK != code) {
if ( CURLM_OK != code )
{
const char *s;
switch (code) {
case CURLM_BAD_HANDLE:
s="CURLM_BAD_HANDLE";
switch ( code )
{
case CURLM_CALL_MULTI_PERFORM: s="CURLM_CALL_MULTI_PERFORM"; break;
case CURLM_BAD_HANDLE: s="CURLM_BAD_HANDLE"; break;
case CURLM_BAD_EASY_HANDLE: s="CURLM_BAD_EASY_HANDLE"; break;
case CURLM_OUT_OF_MEMORY: s="CURLM_OUT_OF_MEMORY"; break;
case CURLM_INTERNAL_ERROR: s="CURLM_INTERNAL_ERROR"; break;
case CURLM_UNKNOWN_OPTION: s="CURLM_UNKNOWN_OPTION"; break;
case CURLM_LAST: s="CURLM_LAST"; break;
default: s="CURLM_unknown";
break;
case CURLM_BAD_EASY_HANDLE:
s="CURLM_BAD_EASY_HANDLE";
break;
case CURLM_OUT_OF_MEMORY:
s="CURLM_OUT_OF_MEMORY";
break;
case CURLM_INTERNAL_ERROR:
s="CURLM_INTERNAL_ERROR";
break;
case CURLM_UNKNOWN_OPTION:
s="CURLM_UNKNOWN_OPTION";
break;
case CURLM_LAST:
s="CURLM_LAST";
break;
default:
s="CURLM_unknown";
break;
case CURLM_BAD_SOCKET:
s="CURLM_BAD_SOCKET";
case CURLM_BAD_SOCKET: s="CURLM_BAD_SOCKET";
fprintf(MSG_OUT, "ERROR: %s returns %s\n", where, s);
/* ignore this error */
return;
@ -180,8 +165,8 @@ static void check_multi_info(GlobalInfo *g)
CURLcode res;
fprintf(MSG_OUT, "REMAINING: %d\n", g->still_running);
while((msg = curl_multi_info_read(g->multi, &msgs_left))) {
if(msg->msg == CURLMSG_DONE) {
while ((msg = curl_multi_info_read(g->multi, &msgs_left))) {
if (msg->msg == CURLMSG_DONE) {
easy = msg->easy_handle;
res = msg->data.result;
curl_easy_getinfo(easy, CURLINFO_PRIVATE, &conn);
@ -209,7 +194,8 @@ static void event_cb(EV_P_ struct ev_io *w, int revents)
rc = curl_multi_socket_action(g->multi, w->fd, action, &g->still_running);
mcode_or_die("event_cb: curl_multi_socket_action", rc);
check_multi_info(g);
if(g->still_running <= 0) {
if ( g->still_running <= 0 )
{
fprintf(MSG_OUT, "last transfer done, kill timeout\n");
ev_timer_stop(g->loop, &g->timer_event);
}
@ -223,8 +209,7 @@ static void timer_cb(EV_P_ struct ev_timer *w, int revents)
GlobalInfo *g = (GlobalInfo *)w->data;
CURLMcode rc;
rc = curl_multi_socket_action(g->multi, CURL_SOCKET_TIMEOUT, 0,
&g->still_running);
rc = curl_multi_socket_action(g->multi, CURL_SOCKET_TIMEOUT, 0, &g->still_running);
mcode_or_die("timer_cb: curl_multi_socket_action", rc);
check_multi_info(g);
}
@ -233,8 +218,9 @@ static void timer_cb(EV_P_ struct ev_timer *w, int revents)
static void remsock(SockInfo *f, GlobalInfo *g)
{
printf("%s \n", __PRETTY_FUNCTION__);
if(f) {
if(f->evset)
if ( f )
{
if ( f->evset )
ev_io_stop(g->loop, &f->ev);
free(f);
}
@ -252,7 +238,7 @@ static void setsock(SockInfo*f, curl_socket_t s, CURL*e, int act, GlobalInfo*g)
f->sockfd = s;
f->action = act;
f->easy = e;
if(f->evset)
if ( f->evset )
ev_io_stop(g->loop, &f->ev);
ev_io_init(&f->ev, event_cb, f->sockfd, kind);
f->ev.data = g;
@ -284,16 +270,18 @@ static int sock_cb(CURL *e, curl_socket_t s, int what, void *cbp, void *sockp)
fprintf(MSG_OUT,
"socket callback: s=%d e=%p what=%s ", s, e, whatstr[what]);
if(what == CURL_POLL_REMOVE) {
if ( what == CURL_POLL_REMOVE )
{
fprintf(MSG_OUT, "\n");
remsock(fdp, g);
}
else {
if(!fdp) {
} else
{
if ( !fdp )
{
fprintf(MSG_OUT, "Adding data: %s\n", whatstr[what]);
addsock(s, e, what, g);
}
else {
} else
{
fprintf(MSG_OUT,
"Changing action from %s to %s\n",
whatstr[fdp->action], whatstr[what]);
@ -339,7 +327,8 @@ static void new_conn(char *url, GlobalInfo *g )
conn->error[0]='\0';
conn->easy = curl_easy_init();
if(!conn->easy) {
if ( !conn->easy )
{
fprintf(MSG_OUT, "curl_easy_init() failed, exiting!\n");
exit(2);
}
@ -347,7 +336,7 @@ static void new_conn(char *url, GlobalInfo *g )
conn->url = strdup(url);
curl_easy_setopt(conn->easy, CURLOPT_URL, conn->url);
curl_easy_setopt(conn->easy, CURLOPT_WRITEFUNCTION, write_cb);
curl_easy_setopt(conn->easy, CURLOPT_WRITEDATA, conn);
curl_easy_setopt(conn->easy, CURLOPT_WRITEDATA, &conn);
curl_easy_setopt(conn->easy, CURLOPT_VERBOSE, 1L);
curl_easy_setopt(conn->easy, CURLOPT_ERRORBUFFER, conn->error);
curl_easy_setopt(conn->easy, CURLOPT_PRIVATE, conn);
@ -374,16 +363,16 @@ static void fifo_cb(EV_P_ struct ev_io *w, int revents)
int n=0;
GlobalInfo *g = (GlobalInfo *)w->data;
do {
do
{
s[0]='\0';
rv=fscanf(g->input, "%1023s%n", s, &n);
s[n]='\0';
if(n && s[0]) {
new_conn(s, g); /* if we read a URL, go get it! */
}
else
break;
} while(rv != EOF);
if ( n && s[0] )
{
new_conn(s,g); /* if we read a URL, go get it! */
} else break;
} while ( rv != EOF );
}
/* Create a named pipe and tell libevent to monitor it */
@ -394,20 +383,24 @@ static int init_fifo (GlobalInfo *g)
curl_socket_t sockfd;
fprintf(MSG_OUT, "Creating named pipe \"%s\"\n", fifo);
if(lstat (fifo, &st) == 0) {
if((st.st_mode & S_IFMT) == S_IFREG) {
if ( lstat (fifo, &st) == 0 )
{
if ( (st.st_mode & S_IFMT) == S_IFREG )
{
errno = EEXIST;
perror("lstat");
exit (1);
}
}
unlink(fifo);
if(mkfifo (fifo, 0600) == -1) {
if ( mkfifo (fifo, 0600) == -1 )
{
perror("mkfifo");
exit (1);
}
sockfd = open(fifo, O_RDWR | O_NONBLOCK, 0);
if(sockfd == -1) {
if ( sockfd == -1 )
{
perror("open");
exit (1);
}
@ -416,7 +409,7 @@ static int init_fifo (GlobalInfo *g)
fprintf(MSG_OUT, "Now, pipe some URL's into > %s\n", fifo);
ev_io_init(&g->fifo_event, fifo_cb, sockfd, EV_READ);
ev_io_start(g->loop, &g->fifo_event);
return (0);
return(0);
}
int main(int argc, char **argv)
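
The hunks above cover the libev callbacks but not how they get attached to the multi handle. A minimal sketch of that wiring, assuming the GlobalInfo struct and the sock_cb/multi_timer_cb callbacks defined in this example; it is an illustration, not the file's exact code:

#include <curl/curl.h>

/* Sketch: register the multi socket callbacks on the multi handle.
   GlobalInfo, sock_cb and multi_timer_cb are assumed to be the ones
   defined in this example. */
static void setup_multi(GlobalInfo *g)
{
  g->multi = curl_multi_init();
  /* called whenever libcurl wants a socket watched or dropped */
  curl_multi_setopt(g->multi, CURLMOPT_SOCKETFUNCTION, sock_cb);
  curl_multi_setopt(g->multi, CURLMOPT_SOCKETDATA, g);
  /* called whenever libcurl changes the timeout it wants armed */
  curl_multi_setopt(g->multi, CURLMOPT_TIMERFUNCTION, multi_timer_cb);
  curl_multi_setopt(g->multi, CURLMOPT_TIMERDATA, g);
}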

View File

@ -1,154 +0,0 @@
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* An example demonstrating how an application can pass in a custom
* socket to libcurl to use. This example also handles the connect itself.
* </DESC>
*/
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <curl/curl.h>
#ifdef WIN32
#include <windows.h>
#include <winsock2.h>
#include <ws2tcpip.h>
#define close closesocket
#else
#include <sys/types.h> /* socket types */
#include <sys/socket.h> /* socket definitions */
#include <netinet/in.h>
#include <arpa/inet.h> /* inet (3) funtions */
#include <unistd.h> /* misc. Unix functions */
#endif
#include <errno.h>
/* The IP address and port number to connect to */
#define IPADDR "127.0.0.1"
#define PORTNUM 80
#ifndef INADDR_NONE
#define INADDR_NONE 0xffffffff
#endif
static size_t write_data(void *ptr, size_t size, size_t nmemb, void *stream)
{
size_t written = fwrite(ptr, size, nmemb, (FILE *)stream);
return written;
}
static curl_socket_t opensocket(void *clientp,
curlsocktype purpose,
struct curl_sockaddr *address)
{
curl_socket_t sockfd;
(void)purpose;
(void)address;
sockfd = *(curl_socket_t *)clientp;
/* the actual externally set socket is passed in via the OPENSOCKETDATA
option */
return sockfd;
}
static int sockopt_callback(void *clientp, curl_socket_t curlfd,
curlsocktype purpose)
{
(void)clientp;
(void)curlfd;
(void)purpose;
/* This return code was added in libcurl 7.21.5 */
return CURL_SOCKOPT_ALREADY_CONNECTED;
}
int main(void)
{
CURL *curl;
CURLcode res;
struct sockaddr_in servaddr; /* socket address structure */
curl_socket_t sockfd;
#ifdef WIN32
WSADATA wsaData;
int initwsa;
if((initwsa = WSAStartup(MAKEWORD(2, 0), &wsaData)) != 0) {
printf("WSAStartup failed: %d\n", initwsa);
return 1;
}
#endif
curl = curl_easy_init();
if(curl) {
/*
* Note that libcurl will internally think that you connect to the host
* and port that you specify in the URL option.
*/
curl_easy_setopt(curl, CURLOPT_URL, "http://99.99.99.99:9999");
/* Create the socket "manually" */
if((sockfd = socket(AF_INET, SOCK_STREAM, 0)) == CURL_SOCKET_BAD ) {
printf("Error creating listening socket.\n");
return 3;
}
memset(&servaddr, 0, sizeof(servaddr));
servaddr.sin_family = AF_INET;
servaddr.sin_port = htons(PORTNUM);
if(INADDR_NONE == (servaddr.sin_addr.s_addr = inet_addr(IPADDR)))
return 2;
if(connect(sockfd, (struct sockaddr *) &servaddr, sizeof(servaddr)) ==
-1) {
close(sockfd);
printf("client error: connect: %s\n", strerror(errno));
return 1;
}
/* no progress meter please */
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 1L);
/* send all data to this function */
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
/* call this function to get a socket */
curl_easy_setopt(curl, CURLOPT_OPENSOCKETFUNCTION, opensocket);
curl_easy_setopt(curl, CURLOPT_OPENSOCKETDATA, &sockfd);
/* call this function to set options for the socket */
curl_easy_setopt(curl, CURLOPT_SOCKOPTFUNCTION, sockopt_callback);
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1);
res = curl_easy_perform(curl);
curl_easy_cleanup(curl);
if(res) {
printf("libcurl error: %d\n", res);
return 4;
}
}
return 0;
}

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,6 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* Upload to a file:// URL
* </DESC>
*/
#include <stdio.h>
#include <curl/curl.h>
#include <sys/stat.h>
@ -68,21 +64,14 @@ int main(void)
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
res = curl_easy_perform(curl);
/* Check for errors */
if(res != CURLE_OK) {
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
}
else {
/* now extract transfer info */
curl_easy_getinfo(curl, CURLINFO_SPEED_UPLOAD, &speed_upload);
curl_easy_getinfo(curl, CURLINFO_TOTAL_TIME, &total_time);
/* now extract transfer info */
curl_easy_getinfo(curl, CURLINFO_SPEED_UPLOAD, &speed_upload);
curl_easy_getinfo(curl, CURLINFO_TOTAL_TIME, &total_time);
fprintf(stderr, "Speed: %.3f bytes/sec during %.3f seconds\n",
speed_upload, total_time);
fprintf(stderr, "Speed: %.3f bytes/sec during %.3f seconds\n",
speed_upload, total_time);
}
/* always cleanup */
curl_easy_cleanup(curl);
}
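
The hunks for this upload example only show the post-transfer bookkeeping. A hedged sketch of the upload setup such an example needs; the path and URL are placeholders, and it relies on the fact that with CURLOPT_READDATA set to a FILE * and no read callback installed, libcurl freads from that stream by default:

#include <stdio.h>
#include <sys/stat.h>
#include <curl/curl.h>

/* Sketch only: stream a local file to the given URL with the easy
   interface.  Error handling is trimmed for brevity. */
static int upload_file(CURL *curl, const char *path, const char *url)
{
  struct stat st;
  FILE *fd = fopen(path, "rb");

  if(!fd || fstat(fileno(fd), &st) != 0)
    return 1;

  curl_easy_setopt(curl, CURLOPT_URL, url);
  curl_easy_setopt(curl, CURLOPT_UPLOAD, 1L);        /* we want an upload */
  curl_easy_setopt(curl, CURLOPT_READDATA, fd);      /* default callback freads this */
  curl_easy_setopt(curl, CURLOPT_INFILESIZE_LARGE, (curl_off_t)st.st_size);

  return curl_easy_perform(curl) == CURLE_OK ? 0 : 1;
}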

View File

@ -42,10 +42,6 @@
*
* This example requires libcurl 7.9.7 or later.
*/
/* <DESC>
* implements an fopen() abstraction allowing reading from URLs
* </DESC>
*/
#include <stdio.h>
#include <string.h>
@ -80,7 +76,7 @@ struct fcurl_data
typedef struct fcurl_data URL_FILE;
/* exported functions */
URL_FILE *url_fopen(const char *url, const char *operation);
URL_FILE *url_fopen(const char *url,const char *operation);
int url_fclose(URL_FILE *file);
int url_feof(URL_FILE *file);
size_t url_fread(void *ptr, size_t size, size_t nmemb, URL_FILE *file);
@ -106,13 +102,13 @@ static size_t write_callback(char *buffer,
if(size > rembuff) {
/* not enough space in buffer */
newbuff=realloc(url->buffer, url->buffer_len + (size - rembuff));
newbuff=realloc(url->buffer,url->buffer_len + (size - rembuff));
if(newbuff==NULL) {
fprintf(stderr, "callback buffer grow failed\n");
fprintf(stderr,"callback buffer grow failed\n");
size=rembuff;
}
else {
/* realloc succeeded increase buffer size*/
/* realloc suceeded increase buffer size*/
url->buffer_len+=size - rembuff;
url->buffer=newbuff;
}
@ -132,10 +128,9 @@ static int fill_buffer(URL_FILE *file, size_t want)
fd_set fdexcep;
struct timeval timeout;
int rc;
CURLMcode mc; /* curl_multi_fdset() return code */
/* only attempt to fill buffer if transactions still running and buffer
* doesn't exceed required size already
* doesnt exceed required size already
*/
if((!file->still_running) || (file->buffer_pos > want))
return 0;
@ -163,34 +158,15 @@ static int fill_buffer(URL_FILE *file, size_t want)
}
/* get file descriptors from the transfers */
mc = curl_multi_fdset(multi_handle, &fdread, &fdwrite, &fdexcep, &maxfd);
curl_multi_fdset(multi_handle, &fdread, &fdwrite, &fdexcep, &maxfd);
if(mc != CURLM_OK) {
fprintf(stderr, "curl_multi_fdset() failed, code %d.\n", mc);
break;
}
/* In a real-world program you OF COURSE check the return code of the
function calls. On success, the value of maxfd is guaranteed to be
greater or equal than -1. We call select(maxfd + 1, ...), specially
in case of (maxfd == -1), we call select(0, ...), which is basically
equal to sleep. */
/* On success the value of maxfd is guaranteed to be >= -1. We call
select(maxfd + 1, ...); specially in case of (maxfd == -1) there are
no fds ready yet so we call select(0, ...) --or Sleep() on Windows--
to sleep 100ms, which is the minimum suggested value in the
curl_multi_fdset() doc. */
if(maxfd == -1) {
#ifdef _WIN32
Sleep(100);
rc = 0;
#else
/* Portable sleep for platforms other than Windows. */
struct timeval wait = { 0, 100 * 1000 }; /* 100ms */
rc = select(0, NULL, NULL, NULL, &wait);
#endif
}
else {
/* Note that on some platforms 'timeout' may be modified by select().
If you need access to the original value save a copy beforehand. */
rc = select(maxfd+1, &fdread, &fdwrite, &fdexcep, &timeout);
}
rc = select(maxfd+1, &fdread, &fdwrite, &fdexcep, &timeout);
switch(rc) {
case -1:
@ -208,12 +184,14 @@ static int fill_buffer(URL_FILE *file, size_t want)
}
/* use to remove want bytes from the front of a files buffer */
static int use_buffer(URL_FILE *file, size_t want)
static int use_buffer(URL_FILE *file,int want)
{
/* sort out buffer */
if((file->buffer_pos - want) <=0) {
/* ditch buffer - write will recreate */
free(file->buffer);
if(file->buffer)
free(file->buffer);
file->buffer=NULL;
file->buffer_pos=0;
file->buffer_len=0;
@ -229,10 +207,10 @@ static int use_buffer(URL_FILE *file, size_t want)
return 0;
}
URL_FILE *url_fopen(const char *url, const char *operation)
URL_FILE *url_fopen(const char *url,const char *operation)
{
/* this code could check for URLs or types in the 'url' and
basically use the real fopen() for standard files */
basicly use the real fopen() for standard files */
URL_FILE *file;
(void)operation;
@ -243,7 +221,7 @@ URL_FILE *url_fopen(const char *url, const char *operation)
memset(file, 0, sizeof(URL_FILE));
if((file->handle.file=fopen(url, operation)))
if((file->handle.file=fopen(url,operation)))
file->type = CFTYPE_FILE; /* marked as URL */
else {
@ -303,7 +281,9 @@ int url_fclose(URL_FILE *file)
break;
}
free(file->buffer);/* free any allocated buffer space */
if(file->buffer)
free(file->buffer);/* free any allocated buffer space */
free(file);
return ret;
@ -337,13 +317,13 @@ size_t url_fread(void *ptr, size_t size, size_t nmemb, URL_FILE *file)
switch(file->type) {
case CFTYPE_FILE:
want=fread(ptr, size, nmemb, file->handle.file);
want=fread(ptr,size,nmemb,file->handle.file);
break;
case CFTYPE_CURL:
want = nmemb * size;
fill_buffer(file, want);
fill_buffer(file,want);
/* check if theres data in the buffer - if not fill_buffer()
* either errored or EOF */
@ -357,7 +337,7 @@ size_t url_fread(void *ptr, size_t size, size_t nmemb, URL_FILE *file)
/* xfer data to caller */
memcpy(ptr, file->buffer, want);
use_buffer(file, want);
use_buffer(file,want);
want = want / size; /* number of items */
break;
@ -378,11 +358,11 @@ char *url_fgets(char *ptr, size_t size, URL_FILE *file)
switch(file->type) {
case CFTYPE_FILE:
ptr = fgets(ptr, (int)size, file->handle.file);
ptr = fgets(ptr,size,file->handle.file);
break;
case CFTYPE_CURL:
fill_buffer(file, want);
fill_buffer(file,want);
/* check if theres data in the buffer - if not fill either errored or
* EOF */
@ -406,7 +386,7 @@ char *url_fgets(char *ptr, size_t size, URL_FILE *file)
memcpy(ptr, file->buffer, want);
ptr[want]=0;/* allways null terminate */
use_buffer(file, want);
use_buffer(file,want);
break;
@ -434,7 +414,9 @@ void url_rewind(URL_FILE *file)
curl_multi_add_handle(multi_handle, file->handle.curl);
/* ditch buffer - write will recreate - resets stream pos*/
free(file->buffer);
if(file->buffer)
free(file->buffer);
file->buffer=NULL;
file->buffer_pos=0;
file->buffer_len=0;
@ -446,10 +428,6 @@ void url_rewind(URL_FILE *file)
}
}
#define FGETSFILE "fgets.test"
#define FREADFILE "fread.test"
#define REWINDFILE "rewind.test"
/* Small main program to retrive from a url using fgets and fread saving the
* output to two test files (note the fgets method will corrupt binary files if
* they contain 0 chars */
@ -458,7 +436,7 @@ int main(int argc, char *argv[])
URL_FILE *handle;
FILE *outf;
size_t nread;
int nread;
char buffer[256];
const char *url;
@ -468,7 +446,7 @@ int main(int argc, char *argv[])
url=argv[1];/* use passed url */
/* copy from url line by line with fgets */
outf=fopen(FGETSFILE, "wb+");
outf=fopen("fgets.test","w+");
if(!outf) {
perror("couldn't open fgets output file\n");
return 1;
@ -482,8 +460,8 @@ int main(int argc, char *argv[])
}
while(!url_feof(handle)) {
url_fgets(buffer, sizeof(buffer), handle);
fwrite(buffer, 1, strlen(buffer), outf);
url_fgets(buffer,sizeof(buffer),handle);
fwrite(buffer,1,strlen(buffer),outf);
}
url_fclose(handle);
@ -492,7 +470,7 @@ int main(int argc, char *argv[])
/* Copy from url with fread */
outf=fopen(FREADFILE, "wb+");
outf=fopen("fread.test","w+");
if(!outf) {
perror("couldn't open fread output file\n");
return 1;
@ -506,8 +484,8 @@ int main(int argc, char *argv[])
}
do {
nread = url_fread(buffer, 1, sizeof(buffer), handle);
fwrite(buffer, 1, nread, outf);
nread = url_fread(buffer, 1,sizeof(buffer), handle);
fwrite(buffer,1,nread,outf);
} while(nread);
url_fclose(handle);
@ -516,7 +494,7 @@ int main(int argc, char *argv[])
/* Test rewind */
outf=fopen(REWINDFILE, "wb+");
outf=fopen("rewind.test","w+");
if(!outf) {
perror("couldn't open fread output file\n");
return 1;
@ -529,19 +507,21 @@ int main(int argc, char *argv[])
return 2;
}
nread = url_fread(buffer, 1, sizeof(buffer), handle);
fwrite(buffer, 1, nread, outf);
nread = url_fread(buffer, 1,sizeof(buffer), handle);
fwrite(buffer,1,nread,outf);
url_rewind(handle);
buffer[0]='\n';
fwrite(buffer, 1, 1, outf);
fwrite(buffer,1,1,outf);
nread = url_fread(buffer, 1,sizeof(buffer), handle);
fwrite(buffer,1,nread,outf);
nread = url_fread(buffer, 1, sizeof(buffer), handle);
fwrite(buffer, 1, nread, outf);
url_fclose(handle);
fclose(outf);
return 0;/* all done */
}
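
One hunk above replaces the bare select() call in fill_buffer() with a check of curl_multi_fdset()'s result and a special case for maxfd == -1. A minimal, POSIX-oriented sketch of that wait step in isolation; the function name is made up here, and the 100ms fallback follows the comment in the hunk:

#include <sys/select.h>
#include <curl/curl.h>

/* Sketch of the wait step fill_buffer() relies on: ask libcurl which
   descriptors to watch, then select() on them.  When maxfd is -1 libcurl
   has nothing to watch yet, so just sleep ~100ms (the minimum suggested
   in the curl_multi_fdset() documentation) and let the caller retry. */
static int wait_on_multi(CURLM *multi, struct timeval timeout)
{
  fd_set fdread, fdwrite, fdexcep;
  int maxfd = -1;

  FD_ZERO(&fdread);
  FD_ZERO(&fdwrite);
  FD_ZERO(&fdexcep);

  if(curl_multi_fdset(multi, &fdread, &fdwrite, &fdexcep, &maxfd) != CURLM_OK)
    return -1;                                   /* treat like a select error */

  if(maxfd == -1) {
    struct timeval wait = { 0, 100 * 1000 };     /* 100ms */
    return select(0, NULL, NULL, NULL, &wait);
  }
  return select(maxfd + 1, &fdread, &fdwrite, &fdexcep, &timeout);
}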

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -19,10 +19,6 @@
* KIND, either express or implied.
*
***************************************************************************/
/* <DESC>
* FTP wildcard pattern matching
* </DESC>
*/
#include <curl/curl.h>
#include <stdio.h>
@ -30,14 +26,14 @@ struct callback_data {
FILE *output;
};
static long file_is_coming(struct curl_fileinfo *finfo,
struct callback_data *data,
int remains);
static long file_is_comming(struct curl_fileinfo *finfo,
struct callback_data *data,
int remains);
static long file_is_downloaded(struct callback_data *data);
static size_t write_it(char *buff, size_t size, size_t nmemb,
void *cb_data);
struct callback_data *data);
int main(int argc, char **argv)
{
@ -65,7 +61,7 @@ int main(int argc, char **argv)
curl_easy_setopt(handle, CURLOPT_WILDCARDMATCH, 1L);
/* callback is called before download of concrete file started */
curl_easy_setopt(handle, CURLOPT_CHUNK_BGN_FUNCTION, file_is_coming);
curl_easy_setopt(handle, CURLOPT_CHUNK_BGN_FUNCTION, file_is_comming);
/* callback is called after data from the file have been transferred */
curl_easy_setopt(handle, CURLOPT_CHUNK_END_FUNCTION, file_is_downloaded);
@ -93,9 +89,9 @@ int main(int argc, char **argv)
return rc;
}
static long file_is_coming(struct curl_fileinfo *finfo,
struct callback_data *data,
int remains)
static long file_is_comming(struct curl_fileinfo *finfo,
struct callback_data *data,
int remains)
{
printf("%3d %40s %10luB ", remains, finfo->filename,
(unsigned long)finfo->size);
@ -119,7 +115,7 @@ static long file_is_coming(struct curl_fileinfo *finfo,
return CURL_CHUNK_BGN_FUNC_SKIP;
}
data->output = fopen(finfo->filename, "wb");
data->output = fopen(finfo->filename, "w");
if(!data->output) {
return CURL_CHUNK_BGN_FUNC_FAIL;
}
@ -139,9 +135,8 @@ static long file_is_downloaded(struct callback_data *data)
}
static size_t write_it(char *buff, size_t size, size_t nmemb,
void *cb_data)
struct callback_data *data)
{
struct callback_data *data = cb_data;
size_t written = 0;
if(data->output)
written = fwrite(buff, size, nmemb, data->output);
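
The hunks show the wildcard callbacks themselves but not how the shared callback_data context reaches them. A sketch of that wiring, assuming the struct and callback names used in this example and a placeholder URL pattern:

#include <curl/curl.h>

/* Sketch: hand one context struct to both the chunk callbacks and the
   write callback.  struct callback_data, file_is_coming,
   file_is_downloaded and write_it are assumed to be the ones defined in
   this example. */
static void setup_wildcard(CURL *handle, struct callback_data *data)
{
  curl_easy_setopt(handle, CURLOPT_URL, "ftp://example.com/test/*.gz");
  curl_easy_setopt(handle, CURLOPT_WILDCARDMATCH, 1L);
  curl_easy_setopt(handle, CURLOPT_CHUNK_BGN_FUNCTION, file_is_coming);
  curl_easy_setopt(handle, CURLOPT_CHUNK_END_FUNCTION, file_is_downloaded);
  curl_easy_setopt(handle, CURLOPT_CHUNK_DATA, data);   /* handed to the two above */
  curl_easy_setopt(handle, CURLOPT_WRITEFUNCTION, write_it);
  curl_easy_setopt(handle, CURLOPT_WRITEDATA, data);    /* handed to write_it */
}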

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -22,10 +22,14 @@
#include <stdio.h>
#include <curl/curl.h>
#include <curl/types.h>
#include <curl/easy.h>
/* <DESC>
* Get a single file from an FTP server.
* </DESC>
/*
* This is an example showing how to get a single file from an FTP server.
* It delays the actual destination file creation until the first write
* callback so that it won't create an empty file in case the remote file
* doesn't exist or something else fails.
*/
struct FtpFile {
@ -51,7 +55,7 @@ int main(void)
CURL *curl;
CURLcode res;
struct FtpFile ftpfile={
"curl.tar.gz", /* name to store the file as if successful */
"curl.tar.gz", /* name to store the file as if succesful */
NULL
};
@ -63,7 +67,7 @@ int main(void)
* You better replace the URL with one that works!
*/
curl_easy_setopt(curl, CURLOPT_URL,
"ftp://ftp.example.com/curl/curl-7.9.2.tar.gz");
"ftp://ftp.example.com/pub/www/utilities/curl/curl-7.9.2.tar.gz");
/* Define our callback to get called when there's data to be written */
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, my_fwrite);
/* Set a pointer to our struct to pass to the callback */

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -23,10 +23,12 @@
#include <string.h>
#include <curl/curl.h>
#include <curl/types.h>
#include <curl/easy.h>
/* <DESC>
* Checks a single file's size and mtime from an FTP server.
* </DESC>
/*
* This is an example showing how to check a single file's size and mtime
* from an FTP server.
*/
static size_t throw_away(void *ptr, size_t size, size_t nmemb, void *data)
@ -43,8 +45,8 @@ int main(void)
char ftpurl[] = "ftp://ftp.example.com/gnu/binutils/binutils-2.19.1.tar.bz2";
CURL *curl;
CURLcode res;
long filetime = -1;
double filesize = 0.0;
const time_t filetime;
const double filesize;
const char *filename = strrchr(ftpurl, '/') + 1;
curl_global_init(CURL_GLOBAL_DEFAULT);
@ -65,18 +67,14 @@ int main(void)
res = curl_easy_perform(curl);
if(CURLE_OK == res) {
/* https://curl.haxx.se/libcurl/c/curl_easy_getinfo.html */
/* http://curl.haxx.se/libcurl/c/curl_easy_getinfo.html */
res = curl_easy_getinfo(curl, CURLINFO_FILETIME, &filetime);
if((CURLE_OK == res) && (filetime >= 0)) {
time_t file_time = (time_t)filetime;
printf("filetime %s: %s", filename, ctime(&file_time));
}
res = curl_easy_getinfo(curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD,
&filesize);
if((CURLE_OK == res) && (filesize>0.0))
if((CURLE_OK == res) && filetime)
printf("filetime %s: %s", filename, ctime(&filetime));
res = curl_easy_getinfo(curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD, &filesize);
if((CURLE_OK == res) && (filesize>0))
printf("filesize %s: %0.0f bytes\n", filename, filesize);
}
else {
} else {
/* we failed */
fprintf(stderr, "curl told us %d\n", res);
}
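
The hunks above only cover reading the results back out. A hedged sketch of the whole probe, with the options that make those getinfo calls meaningful: CURLINFO_FILETIME only returns a value when CURLOPT_FILETIME is enabled, and it wants a long, not the const time_t the old code declared. The URL is a placeholder.

#include <stdio.h>
#include <time.h>
#include <curl/curl.h>

/* Sketch: ask an FTP server for a file's size and mtime without
   downloading the body. */
static void probe(CURL *curl, const char *url)
{
  long filetime = -1;
  double filesize = 0.0;

  curl_easy_setopt(curl, CURLOPT_URL, url);
  curl_easy_setopt(curl, CURLOPT_NOBODY, 1L);     /* headers/size only, no body */
  curl_easy_setopt(curl, CURLOPT_FILETIME, 1L);   /* ask for the modification time */

  if(curl_easy_perform(curl) == CURLE_OK) {
    if(curl_easy_getinfo(curl, CURLINFO_FILETIME, &filetime) == CURLE_OK &&
       filetime >= 0) {
      time_t t = (time_t)filetime;
      printf("filetime: %s", ctime(&t));
    }
    if(curl_easy_getinfo(curl, CURLINFO_CONTENT_LENGTH_DOWNLOAD,
                         &filesize) == CURLE_OK && filesize > 0.0)
      printf("filesize: %.0f bytes\n", filesize);
  }
}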

View File

@ -5,11 +5,11 @@
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2016, Daniel Stenberg, <daniel@haxx.se>, et al.
* Copyright (C) 1998 - 2011, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
* are also available at http://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
@ -22,12 +22,16 @@
#include <stdio.h>
#include <curl/curl.h>
#include <curl/types.h>
#include <curl/easy.h>
/* <DESC>
* Similar to ftpget.c but also stores the received response-lines
/*
* Similar to ftpget.c but this also stores the received response-lines
* in a separate file using our own callback!
* </DESC>
*
* This functionality was introduced in libcurl 7.9.3.
*/
static size_t
write_response(void *ptr, size_t size, size_t nmemb, void *data)
{
@ -35,9 +39,6 @@ write_response(void *ptr, size_t size, size_t nmemb, void *data)
return fwrite(ptr, size, nmemb, writehere);
}
#define FTPBODY "ftp-list"
#define FTPHEADERS "ftp-responses"
int main(void)
{
CURL *curl;
@ -46,10 +47,10 @@ int main(void)
FILE *respfile;
/* local file name to store the file as */
ftpfile = fopen(FTPBODY, "wb"); /* b is binary, needed on win32 */
ftpfile = fopen("ftp-list", "wb"); /* b is binary, needed on win32 */
/* local file name to store the FTP server's response lines in */
respfile = fopen(FTPHEADERS, "wb"); /* b is binary, needed on win32 */
respfile = fopen("ftp-responses", "wb"); /* b is binary, needed on win32 */
curl = curl_easy_init();
if(curl) {
@ -59,12 +60,8 @@ int main(void)
/* If you intend to use this on windows with a libcurl DLL, you must use
CURLOPT_WRITEFUNCTION as well */
curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, write_response);
curl_easy_setopt(curl, CURLOPT_HEADERDATA, respfile);
curl_easy_setopt(curl, CURLOPT_WRITEHEADER, respfile);
res = curl_easy_perform(curl);
/* Check for errors */
if(res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
/* always cleanup */
curl_easy_cleanup(curl);
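
The change above swaps CURLOPT_HEADERDATA back to its older alias CURLOPT_WRITEHEADER; both point the header callback's user pointer at the response file. A small self-contained sketch of the two-stream split this example performs, with a placeholder URL:

#include <stdio.h>
#include <curl/curl.h>

/* Sketch: send the FTP directory listing to one file and the server's
   protocol response lines to another. */
static size_t write_response(void *ptr, size_t size, size_t nmemb, void *data)
{
  return fwrite(ptr, size, nmemb, (FILE *)data);
}

static void split_output(CURL *curl, FILE *listing, FILE *responses)
{
  curl_easy_setopt(curl, CURLOPT_URL, "ftp://ftp.example.com/");
  /* the body, i.e. the directory listing */
  curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_response);
  curl_easy_setopt(curl, CURLOPT_WRITEDATA, listing);
  /* the protocol response lines */
  curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, write_response);
  curl_easy_setopt(curl, CURLOPT_HEADERDATA, responses);
}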

View File

@ -1,99 +0,0 @@
/***************************************************************************
* _ _ ____ _
* Project ___| | | | _ \| |
* / __| | | | |_) | |
* | (__| |_| | _ <| |___
* \___|\___/|_| \_\_____|
*
* Copyright (C) 1998 - 2015, Daniel Stenberg, <daniel@haxx.se>, et al.
*
* This software is licensed as described in the file COPYING, which
* you should have received as part of this distribution. The terms
* are also available at https://curl.haxx.se/docs/copyright.html.
*
* You may opt to use, copy, modify, merge, publish, distribute and/or sell
* copies of the Software, and permit persons to whom the Software is
* furnished to do so, under the terms of the COPYING file.
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
* KIND, either express or implied.
*
***************************************************************************/
#include <stdio.h>
#include <curl/curl.h>
/* <DESC>
* Get a single file from an FTPS server.
* </DESC>
*/
struct FtpFile {
const char *filename;
FILE *stream;
};
static size_t my_fwrite(void *buffer, size_t size, size_t nmemb,
void *stream)
{
struct FtpFile *out=(struct FtpFile *)stream;
if(out && !out->stream) {
/* open file for writing */
out->stream=fopen(out->filename, "wb");
if(!out->stream)
return -1; /* failure, can't open file to write */
}
return fwrite(buffer, size, nmemb, out->stream);
}
int main(void)
{
CURL *curl;
CURLcode res;
struct FtpFile ftpfile={
"yourfile.bin", /* name to store the file as if successful */
NULL
};
curl_global_init(CURL_GLOBAL_DEFAULT);
curl = curl_easy_init();
if(curl) {
/*
* You better replace the URL with one that works! Note that we use an
* FTP:// URL with standard explicit FTPS. You can also do FTPS:// URLs if
* you want to do the rarer kind of transfers: implicit.
*/
curl_easy_setopt(curl, CURLOPT_URL,
"ftp://user@server/home/user/file.txt");
/* Define our callback to get called when there's data to be written */
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, my_fwrite);
/* Set a pointer to our struct to pass to the callback */
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &ftpfile);
/* We activate SSL and we require it for both control and data */
curl_easy_setopt(curl, CURLOPT_USE_SSL, CURLUSESSL_ALL);
/* Switch on full protocol/debug output */
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
res = curl_easy_perform(curl);
/* always cleanup */
curl_easy_cleanup(curl);
if(CURLE_OK != res) {
/* we failed */
fprintf(stderr, "curl told us %d\n", res);
}
}
if(ftpfile.stream)
fclose(ftpfile.stream); /* close the local file */
curl_global_cleanup();
return 0;
}

Some files were not shown because too many files have changed in this diff.