m3sfit

How to use SSPI to authenticate with a TCP server using OpenSSL?

I am self-studying SSPI and how to program a client for secure connections. The server side is a Linux-based OpenSSL TCP server written in C, and I am writing the Windows client.

So far I know that the TLS handshake happens over multiple calls to InitializeSecurityContext, but I cannot pinpoint why I am getting SEC_E_INVALID_TOKEN back from the very first call to InitializeSecurityContext.

I am developing the client based on the following POC (a sketch of the full handshake loop I am working toward follows the snippet):

// headers and libraries the snippet needs (Winsock + SSPI/Schannel)
#define WIN32_LEAN_AND_MEAN
#include <winsock2.h>
#include <windows.h>
#define SECURITY_WIN32
#include <security.h>
#include <schannel.h>
#include <stdio.h>

#pragma comment(lib, "ws2_32.lib")
#pragma comment(lib, "secur32.lib")

// HOST, PORT and TLS_MAX_PACKET_SIZE are #define'd elsewhere in my project

int main()
{
    SOCKET soc;
    WSADATA wsa_data;
    
    if (WSAStartup(0x0101, &wsa_data) != 0)
    {
        fprintf(stderr, "Failed to initialize wsa\n");
        return 1;
    }

    SOCKADDR_IN server;

    soc = WSASocket(AF_INET, SOCK_STREAM, 0, NULL,  0, WSA_FLAG_OVERLAPPED);
    if (soc == INVALID_SOCKET)
    {
        fprintf(stderr, "Failed to create socket\n");
        return 1;
    }

    server.sin_family = AF_INET;
    server.sin_port = htons(PORT);
    server.sin_addr.s_addr = inet_addr(HOST);

    if (WSAConnect(soc, (SOCKADDR *)&server, sizeof(server), NULL, NULL, NULL, NULL) == SOCKET_ERROR)
    {
        fprintf(stderr, "Failed to connect to server\n");
        return 1;
    }

    
    // acquire Schannel credentials for the client side of the TLS handshake

    CredHandle handle;
    SCHANNEL_CRED cred =
    {
        .dwVersion = SCHANNEL_CRED_VERSION,
        .dwFlags = SCH_USE_STRONG_CRYPTO          // use only strong crypto algorithms
                    | SCH_CRED_AUTO_CRED_VALIDATION  // automatically validate server certificate
                    | SCH_CRED_NO_DEFAULT_CREDS,     // no client certificate authentication
        .grbitEnabledProtocols = SP_PROT_TLS1_2,  // allow only TLS v1.2
    };

    if (AcquireCredentialsHandleA(NULL, UNISP_NAME_A, SECPKG_CRED_OUTBOUND, NULL, &cred, NULL, NULL, &handle, NULL) != SEC_E_OK)
    {
        fprintf(stderr, "Failed to acquire credentials handle\n");
        return 1;
    }
    
    

    CtxtHandle ctx = {0};   // security context, filled in by InitializeSecurityContext
    
    char incoming[TLS_MAX_PACKET_SIZE];
    int received = 0;
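    // nothing has been received from the server yet, so the input token
    // buffer below starts out empty (cbBuffer == 0)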


    SecBuffer inbuffers[2] = {0};
    inbuffers[0].BufferType = SECBUFFER_TOKEN;
    inbuffers[0].pvBuffer = incoming;
    inbuffers[0].cbBuffer = received;
    inbuffers[1].BufferType = SECBUFFER_EMPTY;

    SecBuffer outbuffers[1] = {0};
    outbuffers[0].BufferType = SECBUFFER_TOKEN;

    SecBufferDesc indesc = {SECBUFFER_VERSION, sizeof(inbuffers), inbuffers};
    SecBufferDesc outdesc = {SECBUFFER_VERSION, sizeof(outbuffers), outbuffers};

    DWORD flags = ISC_REQ_USE_SUPPLIED_CREDS   // don't let Schannel pick a client certificate automatically
                | ISC_REQ_ALLOCATE_MEMORY      // let SSPI allocate the output token buffer
                | ISC_REQ_CONFIDENTIALITY      // encrypt the stream
                | ISC_REQ_REPLAY_DETECT
                | ISC_REQ_SEQUENCE_DETECT
                | ISC_REQ_STREAM;              // stream (TLS) semantics

    // DWORD contextAttributes = 0;
    SECURITY_STATUS sec = InitializeSecurityContextA(
        &handle,            // Handle to the credentials (CredHandle from AcquireCredentialsHandle)
        NULL,               // No existing context (for the first call), or &ctx for subsequent calls
        (SEC_CHAR*)HOST,    // Target server name (pointer cast to SEC_CHAR*)
        flags,              // Request flags (e.g., ISC_REQ_CONFIDENTIALITY, etc.)
        0,                  // Reserved1, should be 0
        SECURITY_NATIVE_DREP, // Data representation (SECURITY_NATIVE_DREP is the usual value)
        &indesc,            // Pointer to input buffer descriptor
        0,                  // Reserved2, should be 0
        &ctx,               // Pointer to context handle to receive the new context
        &outdesc,           // Pointer to output buffer descriptor
        &flags, // Pointer to receive context attributes (DWORD)
        NULL                // Pointer to receive expiration time (optional)
    );
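    // sec comes back as SEC_E_INVALID_TOKEN at this point

For reference, this is the overall loop shape I am trying to end up with once the first call works. It is only a sketch of my current understanding, not tested code: send_all and recv_some are placeholder helpers (simple send/recv wrappers) that I have not written yet, and most error handling is left out.

    // sketch of the full handshake loop (reuses soc, handle, ctx, HOST,
    // incoming and received from above)
    BOOL have_ctx = FALSE;

    for (;;)
    {
        SecBuffer inbuf[2] = {0};
        inbuf[0].BufferType = SECBUFFER_TOKEN;
        inbuf[0].pvBuffer = incoming;
        inbuf[0].cbBuffer = received;
        inbuf[1].BufferType = SECBUFFER_EMPTY;

        SecBuffer outbuf[1] = {0};
        outbuf[0].BufferType = SECBUFFER_TOKEN;

        SecBufferDesc indesc = {SECBUFFER_VERSION, 2, inbuf};
        SecBufferDesc outdesc = {SECBUFFER_VERSION, 1, outbuf};

        DWORD flags = ISC_REQ_USE_SUPPLIED_CREDS | ISC_REQ_ALLOCATE_MEMORY | ISC_REQ_CONFIDENTIALITY
                    | ISC_REQ_REPLAY_DETECT | ISC_REQ_SEQUENCE_DETECT | ISC_REQ_STREAM;

        SECURITY_STATUS sec = InitializeSecurityContextA(
            &handle,
            have_ctx ? &ctx : NULL,             // no existing context on the first call
            have_ctx ? NULL : (SEC_CHAR*)HOST,  // target name only on the first call
            flags,
            0,
            SECURITY_NATIVE_DREP,
            have_ctx ? &indesc : NULL,          // no input token on the first call
            0,
            have_ctx ? NULL : &ctx,             // receive the new context on the first call
            &outdesc,
            &flags,
            NULL);
        have_ctx = TRUE;

        // send whatever token Schannel produced for the server
        if (outbuf[0].pvBuffer != NULL && outbuf[0].cbBuffer != 0)
        {
            send_all(soc, outbuf[0].pvBuffer, outbuf[0].cbBuffer);   // placeholder helper
            FreeContextBuffer(outbuf[0].pvBuffer);
        }

        if (sec == SEC_E_OK)
        {
            break;                              // handshake finished
        }
        else if (sec == SEC_I_CONTINUE_NEEDED)
        {
            if (inbuf[1].BufferType == SECBUFFER_EXTRA)
            {
                // unconsumed bytes of the next record: move them to the front
                MoveMemory(incoming, incoming + (received - (int)inbuf[1].cbBuffer), inbuf[1].cbBuffer);
                received = (int)inbuf[1].cbBuffer;
                continue;                       // process them before reading more
            }
            received = 0;
        }
        else if (sec != SEC_E_INCOMPLETE_MESSAGE)
        {
            break;                              // hard failure
        }

        // incomplete record or nothing left to process: read more handshake
        // bytes from the server
        received += recv_some(soc, incoming + received, sizeof(incoming) - received);  // placeholder helper
    }

One difference I notice compared to my POC is that this pattern passes no input descriptor and supplies the target name only on the first call; I am not sure whether that matters.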
