| cublas.h | | cublas.h | |
| /* | | /* | |
|
| * Copyright 1993-2010 NVIDIA Corporation. All rights reserved. | | * Copyright 1993-2011 NVIDIA Corporation. All rights reserved. | |
| * | | * | |
|
| * NOTICE TO USER: | | * NOTICE TO LICENSEE: | |
| * | | * | |
|
| * This source code is subject to NVIDIA ownership rights under U.S. and | | * This source code and/or documentation ("Licensed Deliverables") are | |
| * international Copyright laws. Users and possessors of this source code | | * subject to NVIDIA intellectual property rights under U.S. and | |
| * are hereby granted a nonexclusive, royalty-free license to use this code | | * international Copyright laws. | |
| * in individual and commercial software. | | | |
| * | | * | |
|
| * NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE | | * These Licensed Deliverables contained herein is PROPRIETARY and | |
| * CODE FOR ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR | | * CONFIDENTIAL to NVIDIA and is being provided under the terms and | |
| * IMPLIED WARRANTY OF ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH | | * conditions of a form of NVIDIA software license agreement by and | |
| * REGARD TO THIS SOURCE CODE, INCLUDING ALL IMPLIED WARRANTIES OF | | * between NVIDIA and Licensee ("License Agreement") or electronically | |
| * MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE. | | * accepted by Licensee. Notwithstanding any terms or conditions to | |
| * IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL, | | * the contrary in the License Agreement, reproduction or disclosure | |
| * OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS | | * of the Licensed Deliverables to any third party without the express | |
| | | * written consent of NVIDIA is prohibited. | |
| * OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE | | | |
| * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE | | | |
| * OR PERFORMANCE OF THIS SOURCE CODE. | | | |
| * | | * | |
|
| * U.S. Government End Users. This source code is a "commercial item" as | | * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE | |
| * that term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting of | | * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE | |
| * "commercial computer software" and "commercial computer software | | * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. IT IS | |
| * documentation" as such terms are used in 48 C.F.R. 12.212 (SEPT 1995) | | * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND. | |
| * and is provided to the U.S. Government only as a commercial end item. | | * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED | |
| * Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through | | * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY, | |
| * 227.7202-4 (JUNE 1995), all U.S. Government End Users acquire the | | * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE. | |
| * source code with only those rights set forth herein. | | * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE | |
| | | * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY | |
| | | * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY | |
| | | * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, | |
| | | * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS | |
| | | * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE | |
| | | * OF THESE LICENSED DELIVERABLES. | |
| * | | * | |
|
| * Any use of this source code in individual and commercial software must | | * U.S. Government End Users. These Licensed Deliverables are a | |
| * include, in the user documentation and internal comments to the code, | | * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT | |
| * the above Disclaimer and U.S. Government End Users Notice. | | * 1995), consisting of "commercial computer software" and "commercial | |
| | | * computer software documentation" as such terms are used in 48 | |
| | | * C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government | |
| | | * only as a commercial end item. Consistent with 48 C.F.R.12.212 and | |
| | | * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all | |
| | | * U.S. Government End Users acquire the Licensed Deliverables with | |
| | | * only those rights set forth herein. | |
| | | * | |
| | | * Any use of the Licensed Deliverables in individual and commercial | |
| | | * software must include, in the user documentation and internal | |
| | | * comments to the code, the above Disclaimer and U.S. Government End | |
| | | * Users Notice. | |
| */ | | */ | |
| | | | |
| /* | | /* | |
| * This is the public header file for the CUBLAS library, defining the API | | * This is the public header file for the CUBLAS library, defining the API | |
| * | | * | |
| * CUBLAS is an implementation of BLAS (Basic Linear Algebra Subroutines) | | * CUBLAS is an implementation of BLAS (Basic Linear Algebra Subroutines) | |
|
| * on top of the CUDA driver. It allows access to the computational resources | | * on top of the CUDA runtime. | |
| * of NVIDIA GPUs. The library is self-contained at the API level, i.e. no | | | |
| * direct interaction with the CUDA driver is necessary. | | | |
| * | | | |
| * The basic model by which applications use the CUBLAS library is to create | | | |
| * matrix and vector object in GPU memory space, fill them with data, then | | | |
| * call a sequence of BLAS functions, and finally upload the results from GPU | | | |
| * memory space back to the host. To accomplish this, CUBLAS provides helper | | | |
| * functions for creating and destroying objects in GPU space, and to write | | | |
| * data to, and retrieve data from, these objects. | | | |
| * | | | |
| * For maximum compatibility with existing Fortran environments, | | | |
| * CUBLAS uses column-major storage and 1-based indexing. Calls to | | | |
| * CUBLAS functions look very similar to calls to the original Fortran | | | |
| * BLAS functions. For example, the Fortran function call | | | |
| * | | | |
| * SDOT(KRANK+1-J,W(I,J),MDW,W(J,J),MDW) | | | |
| * | | | |
| * would map to the CUBLAS function call made from C/C++ code: | | | |
| * | | | |
| * #define IDX2(i,j,lead_dim) (((j)*(lead_dim))+(i)) | | | |
| * sdot(krank+1-j,w[IDX2(i,j,ldw)], mdw, w[IDX2(i,j,ldw)], mdw) | | | |
| * | | | |
| * Since the BLAS core functions (as opposed to the helper functions) do | | | |
| * not return error status directly (for reasons of compatibility with | | | |
| * existing BLAS libraries) CUBLAS provides a separate function to | | | |
| * retrieve the last error that was recorded, to aid in debugging. | | | |
| * | | | |
| * Currently, only a subset of the BLAS core functions is implemented. | | | |
| * | | | |
| * The interface to the CUBLAS library is the header file cublas.h. | | | |
| * Applications using CUBLAS need to link against the DSO cublas.so | | | |
| * (Linux) or the DLL cublas.dll (Win32). | | | |
| */ | | */ | |
| | | | |
| #if !defined(CUBLAS_H_) | | #if !defined(CUBLAS_H_) | |
| #define CUBLAS_H_ | | #define CUBLAS_H_ | |
| | | | |
|
| #ifdef __MULTI_CORE__ | | #include <cuda_runtime.h> | |
| #error CUBLAS not supported on multicore | | #include "cublas_api.h" | |
| #endif | | | |
| | | | |
| #ifndef CUBLASAPI | | | |
| #ifdef _WIN32 | | | |
| #define CUBLASAPI __stdcall | | | |
| #else | | | |
| #define CUBLASAPI | | | |
| #endif | | | |
| #endif | | | |
| | | | |
| #if defined(__cplusplus) | | #if defined(__cplusplus) | |
| extern "C" { | | extern "C" { | |
|
| #endif /* __cplusplus */ | | #endif | |
| | | | |
| #include "driver_types.h" | | | |
| #include "cuComplex.h" /* import complex data type */ | | | |
| | | | |
| /* CUBLAS status returns */ | | | |
| #define CUBLAS_STATUS_SUCCESS 0x00000000 | | | |
| #define CUBLAS_STATUS_NOT_INITIALIZED 0x00000001 | | | |
| #define CUBLAS_STATUS_ALLOC_FAILED 0x00000003 | | | |
| #define CUBLAS_STATUS_INVALID_VALUE 0x00000007 | | | |
| #define CUBLAS_STATUS_ARCH_MISMATCH 0x00000008 | | | |
| #define CUBLAS_STATUS_MAPPING_ERROR 0x0000000B | | | |
| #define CUBLAS_STATUS_EXECUTION_FAILED 0x0000000D | | | |
| #define CUBLAS_STATUS_INTERNAL_ERROR 0x0000000E | | | |
| | | | |
| /* CUBLAS data types */ | | /* CUBLAS data types */ | |
|
| typedef unsigned int cublasStatus; | | #define cublasStatus cublasStatus_t | |
| | | | |
| /* CUBLAS helper functions */ | | | |
| | | | |
|
| /* | | | |
| * cublasStatus | | | |
| * cublasInit (void) | | | |
| * | | | |
| * initializes the CUBLAS library and must be called before any other | | | |
| * CUBLAS API function is invoked. It allocates hardware resources | | | |
| * necessary for accessing the GPU. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if resources could not be allocated | | | |
| *    CUBLAS_STATUS_SUCCESS         if CUBLAS library initialized successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasInit (void); | | cublasStatus CUBLASAPI cublasInit (void); | |
|
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasShutdown (void) | | | |
| * | | | |
| * releases CPU-side resources used by the CUBLAS library. The release of | | | |
| * GPU-side resources may be deferred until the application shuts down. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| *    CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialized | | | |
| * CUBLAS_STATUS_SUCCESS if CUBLAS library shut down successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasShutdown (void); | | cublasStatus CUBLASAPI cublasShutdown (void); | |
|
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasGetError (void) | | | |
| * | | | |
| * returns the last error that occurred on invocation of any of the | | | |
| * CUBLAS BLAS functions. While the CUBLAS helper functions return status | | | |
| * directly, the BLAS functions do not do so for improved | | | |
| * compatibility with existing environments that do not expect BLAS | | | |
| * functions to return status. Reading the error status via | | | |
| * cublasGetError() resets the internal error state to | | | |
| * CUBLAS_STATUS_SUCCESS. | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasGetError (void); | | cublasStatus CUBLASAPI cublasGetError (void); | |
|
| | | cublasHandle_t CUBLASAPI cublasGetCurrentCtx(void); | |
| | | cublasStatus_t CUBLASAPI cublasSetError (cublasStatus_t error); | |
| | | | |
|
| /* | | static __inline__ cublasStatus CUBLASAPI cublasGetVersion(int *version) | |
| * cublasStatus | | { | |
| * cublasAlloc (int n, int elemSize, void **devicePtr) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | return cublasGetVersion_v2(handle, version); | |
| * creates an object in GPU memory space capable of holding an array of | | } | |
| * n elements, where each element requires elemSize bytes of storage. If | | | |
| * the function call is successful, a pointer to the object in GPU memory | | | |
| * space is placed in devicePtr. Note that this is a device pointer that | | | |
| * cannot be dereferenced in host code. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| *    CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialized | | | |
| *    CUBLAS_STATUS_INVALID_VALUE   if n <= 0, or elemSize <= 0 | | | |
| *    CUBLAS_STATUS_ALLOC_FAILED    if the object could not be allocated due to | | | |
| *                                  lack of resources. | | | |
| * CUBLAS_STATUS_SUCCESS if storage was successfully allocated | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasAlloc (int n, int elemSize, void **devicePtr); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasFree (const void *devicePtr) | | | |
| * | | | |
| * destroys the object in GPU memory space pointed to by devicePtr. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INTERNAL_ERROR if the object could not be deallocated | | | |
| * CUBLAS_STATUS_SUCCESS if object was destroyed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasFree (const void *devicePtr); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasSetVector (int n, int elemSize, const void *x, int incx, | | | |
| * void *y, int incy) | | | |
| * | | | |
| * copies n elements from a vector x in CPU memory space to a vector y | | | |
| * in GPU memory space. Elements in both vectors are assumed to have a | | | |
| * size of elemSize bytes. Storage spacing between consecutive elements | | | |
| * is incx for the source vector x and incy for the destination vector | | | |
| * y. In general, y points to an object, or part of an object, allocated | | | |
| * via cublasAlloc(). Column major format for two-dimensional matrices | | | |
| * is assumed throughout CUBLAS. Therefore, if the increment for a vector | | | |
| * is equal to 1, this access a column vector while using an increment | | | |
| * equal to the leading dimension of the respective matrix accesses a | | | |
| * row vector. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library not been initialized | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx, incy, or elemSize <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if an error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasSetVector (int n, int elemSize, const void *x, | | | |
| int incx, void *devicePtr, int incy | | | |
| ); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasGetVector (int n, int elemSize, const void *x, int incx, | | | |
| * void *y, int incy) | | | |
| * | | | |
| * copies n elements from a vector x in GPU memory space to a vector y | | | |
| * in CPU memory space. Elements in both vectors are assumed to have a | | | |
| * size of elemSize bytes. Storage spacing between consecutive elements | | | |
| * is incx for the source vector x and incy for the destination vector | | | |
| * y. In general, x points to an object, or part of an object, allocated | | | |
| * via cublasAlloc(). Column major format for two-dimensional matrices | | | |
| * is assumed throughout CUBLAS. Therefore, if the increment for a vector | | | |
| * is equal to 1, this access a column vector while using an increment | | | |
| * equal to the leading dimension of the respective matrix accesses a | | | |
| * row vector. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library not been initialized | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx, incy, or elemSize <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if an error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasGetVector (int n, int elemSize, const void *x, | | | |
| int incx, void *y, int incy); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasSetMatrix (int rows, int cols, int elemSize, const void *A, | | | |
| * int lda, void *B, int ldb) | | | |
| * | | | |
| * copies a tile of rows x cols elements from a matrix A in CPU memory | | | |
| * space to a matrix B in GPU memory space. Each element requires storage | | | |
| * of elemSize bytes. Both matrices are assumed to be stored in column | | | |
| * major format, with the leading dimension (i.e. number of rows) of | | | |
| * source matrix A provided in lda, and the leading dimension of matrix B | | | |
| * provided in ldb. In general, B points to an object, or part of an | | | |
| * object, that was allocated via cublasAlloc(). | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if rows or cols < 0, or elemSize, lda, or | | | |
| * ldb <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasSetMatrix (int rows, int cols, int elemSize, | | | |
| const void *A, int lda, void *B, | | | |
| int ldb); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasGetMatrix (int rows, int cols, int elemSize, const void *A, | | | |
| * int lda, void *B, int ldb) | | | |
| * | | | |
| * copies a tile of rows x cols elements from a matrix A in GPU memory | | | |
| * space to a matrix B in CPU memory space. Each element requires storage | | | |
| * of elemSize bytes. Both matrices are assumed to be stored in column | | | |
| * major format, with the leading dimension (i.e. number of rows) of | | | |
| * source matrix A provided in lda, and the leading dimension of matrix B | | | |
| * provided in ldb. In general, A points to an object, or part of an | | | |
| * object, that was allocated via cublasAlloc(). | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if rows, cols, eleSize, lda, or ldb <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasGetMatrix (int rows, int cols, int elemSize, | | | |
| const void *A, int lda, void *B, | | | |
| int ldb); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasSetKernelStream ( cudaStream_t stream ) | | | |
| * | | | |
| * set the CUBLAS stream in which all subsequent CUBLAS kernel launches wil | | | |
| l run. | | | |
| * By default, if the CUBLAS stream is not set, all kernels will use the NU | | | |
| LL | | | |
| * stream. This routine can be used to change the stream between kernels la | | | |
| unches | | | |
| * and can be used also to set the CUBLAS stream back to NULL. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_SUCCESS if stream set successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasSetKernelStream (cudaStream_t stream); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasSetVectorAsync ( int n, int elemSize, const void *x, int incx, | | | |
| * void *y, int incy, cudaStream_t stream ); | | | |
| * | | | |
| * cublasSetVectorAsync has the same functionnality as cublasSetVector | | | |
| * but the transfer is done asynchronously within the CUDA stream passed | | | |
| * in parameter. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library not been initialized | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx, incy, or elemSize <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if an error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasSetVectorAsync (int n, int elemSize, | | | |
| const void *hostPtr, int incx, | | | |
| void *devicePtr, int incy, | | | |
| cudaStream_t stream); | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasGetVectorAsync( int n, int elemSize, const void *x, int incx, | | | |
| * void *y, int incy, cudaStream_t stream) | | | |
| * | | | |
| * cublasGetVectorAsync has the same functionnality as cublasGetVector | | | |
| * but the transfer is done asynchronously within the CUDA stream passed | | | |
| * in parameter. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library not been initialized | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx, incy, or elemSize <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if an error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasGetVectorAsync (int n, int elemSize, | | | |
| const void *devicePtr, int inc | | | |
| x, | | | |
| void *hostPtr, int incy, | | | |
| cudaStream_t stream); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasSetMatrixAsync (int rows, int cols, int elemSize, const void *A, | | | |
| * int lda, void *B, int ldb, cudaStream_t stream) | | | |
| * | | | |
| * cublasSetMatrixAsync has the same functionnality as cublasSetMatrix | | | |
| * but the transfer is done asynchronously within the CUDA stream passed | | | |
| * in parameter. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if rows or cols < 0, or elemSize, lda, or | | | |
| * ldb <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasSetMatrixAsync (int rows, int cols, int elemSi | | | |
| ze, | | | |
| const void *A, int lda, void * | | | |
| B, | | | |
| int ldb, cudaStream_t stream); | | | |
| | | | |
| /* | | | |
| * cublasStatus | | | |
| * cublasGetMatrixAsync (int rows, int cols, int elemSize, const void *A, | | | |
| * int lda, void *B, int ldb, cudaStream_t stream) | | | |
| * | | | |
| * cublasGetMatrixAsync has the same functionnality as cublasGetMatrix | | | |
| * but the transfer is done asynchronously within the CUDA stream passed | | | |
| * in parameter. | | | |
| * | | | |
| * Return Values | | | |
| * ------------- | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if rows, cols, eleSize, lda, or ldb <= 0 | | | |
| * CUBLAS_STATUS_MAPPING_ERROR if error occurred accessing GPU memory | | | |
| * CUBLAS_STATUS_SUCCESS if the operation completed successfully | | | |
| */ | | | |
| cublasStatus CUBLASAPI cublasGetMatrixAsync (int rows, int cols, int elemSi | | | |
| ze, | | | |
| const void *A, int lda, void * | | | |
| B, | | | |
| int ldb, cudaStream_t stream); | | | |
| | | | |
| /* ---------------- CUBLAS single-precision BLAS1 functions ---------------- */ | | | |
| | | | |
| /* | | | |
| * int | | | |
| * cublasIsamax (int n, const float *x, int incx) | | | |
| * | | | |
| * finds the smallest index of the maximum magnitude element of single | | | |
| * precision vector x; that is, the result is the first i, i = 0 to n - 1, | | | |
| * that maximizes abs(x[1 + i * incx])). | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/isamax.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIsamax (int n, const float *x, int incx); | | | |
| | | | |
| /* | | | |
| * int | | | |
| * cublasIsamin (int n, const float *x, int incx) | | | |
| * | | | |
| * finds the smallest index of the minimum magnitude element of single | | | |
| * precision vector x; that is, the result is the first i, i = 0 to n - 1, | | | |
| * that minimizes abs(x[1 + i * incx])). | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/scilib/blass.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIsamin (int n, const float *x, int incx); | | | |
| | | | |
| /* | | | |
| * float | | | |
| * cublasSasum (int n, const float *x, int incx) | | | |
| * | | | |
| * computes the sum of the absolute values of the elements of single | | | |
| * precision vector x; that is, the result is the sum from i = 0 to n - 1 of | | | |
| * abs(x[1 + i * incx]). | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the single precision sum of absolute values | | | |
| * (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sasum.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| float CUBLASAPI cublasSasum (int n, const float *x, int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSaxpy (int n, float alpha, const float *x, int incx, float *y, | | | |
| * int incy) | | | |
| * | | | |
| * multiplies single precision vector x by single precision scalar alpha | | | |
| * and adds the result to single precision vector y; that is, it overwrites | | | |
| * single precision y with single precision alpha * x + y. For i = 0 to n - 1, | | | |
| * it replaces y[ly + i * incy] with alpha * x[lx + i * incx] + y[ly + i * | | | |
| * incy], where lx = 1 if incx >= 0, else lx = 1 +(1 - n) * incx, and ly is | | | |
| * defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * alpha single precision scalar multiplier | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y single precision result (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/saxpy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSaxpy (int n, float alpha, const float *x, int incx, | | | |
| float *y, int incy); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasScopy (int n, const float *x, int incx, float *y, int incy) | | | |
| * | | | |
| * copies the single precision vector x to the single precision vector y. F | | | |
| or | | | |
| * i = 0 to n-1, copies x[lx + i * incx] to y[ly + i * incy], where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in a similar | | | |
| * way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y contains single precision vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/scopy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasScopy (int n, const float *x, int incx, float *y, | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * float | | | |
| * cublasSdot (int n, const float *x, int incx, const float *y, int incy) | | | |
| * | | | |
| * computes the dot product of two single precision vectors. It returns the | | | |
| * dot product of the single precision vectors x and y if successful, and | | | |
| * 0.0f otherwise. It computes the sum for i = 0 to n - 1 of x[lx + i * | | | |
| * incx] * y[ly + i * incy], where lx = 1 if incx >= 0, else lx = 1 + (1 - | | | |
| n) | | | |
| * *incx, and ly is defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns single precision dot product (zero if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sdot.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has nor been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to execute on GPU | | | |
| */ | | | |
| float CUBLASAPI cublasSdot (int n, const float *x, int incx, const float *y | | | |
| , | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * float | | | |
| * cublasSnrm2 (int n, const float *x, int incx) | | | |
| * | | | |
| * computes the Euclidean norm of the single precision n-vector x (with | | | |
| * storage increment incx). This code uses a multiphase model of | | | |
| * accumulation to avoid intermediate underflow and overflow. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns Euclidian norm (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/snrm2.f | | | |
| * Reference: http://www.netlib.org/slatec/lin/snrm2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| float CUBLASAPI cublasSnrm2 (int n, const float *x, int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSrot (int n, float *x, int incx, float *y, int incy, float sc, | | | |
| * float ss) | | | |
| * | | | |
| * multiplies a 2x2 matrix ( sc ss) with the 2xn matrix ( transpose(x) ) | | | |
| * (-ss sc) ( transpose(y) ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 ... n - 1, where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * sc element of rotation matrix | | | |
| * ss element of rotation matrix | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated vector x (unchanged if n <= 0) | | | |
| * y rotated vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference http://www.netlib.org/blas/srot.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSrot (int n, float *x, int incx, float *y, int incy, | | | |
| float sc, float ss); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSrotg (float *host_sa, float *host_sb, float *host_sc, float *host | | | |
| _ss) | | | |
| * | | | |
| * constructs the Givens transformation | | | |
| * | | | |
| * ( sc ss ) | | | |
| * G = ( ) , sc^2 + ss^2 = 1, | | | |
| * (-ss sc ) | | | |
| * | | | |
| * which zeros the second entry of the 2-vector transpose(sa, sb). | | | |
| * | | | |
| * The quantity r = (+/-) sqrt (sa^2 + sb^2) overwrites sa in storage. The | | | |
| * value of sb is overwritten by a value z which allows sc and ss to be | | | |
| * recovered by the following algorithm: | | | |
| * | | | |
| * if z=1 set sc = 0.0 and ss = 1.0 | | | |
| * if abs(z) < 1 set sc = sqrt(1-z^2) and ss = z | | | |
| * if abs(z) > 1 set sc = 1/z and ss = sqrt(1-sc^2) | | | |
| * | | | |
| * The function srot (n, x, incx, y, incy, sc, ss) normally is called next | | | |
| * to apply the transformation to a 2 x n matrix. | | | |
| * Note that this function is provided for completeness and run exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * sa single precision scalar | | | |
| * sb single precision scalar | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * sa single precision r | | | |
| * sb single precision z | | | |
| * sc single precision result | | | |
| * ss single precision result | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/srotg.f | | | |
| * | | | |
| * This function does not set any error status. | | | |
| */ | | | |
| void CUBLASAPI cublasSrotg (float *host_sa, float *host_sb, float *host_sc, | | | |
| float *host_ss); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSrotm (int n, float *x, int incx, float *y, int incy, | | | |
| * const float* sparam) | | | |
| * | | | |
| * applies the modified Givens transformation, h, to the 2 x n matrix | | | |
| * | | | |
| * ( transpose(x) ) | | | |
| * ( transpose(y) ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 to n-1, where lx = 1 if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. With sparam[0] = sflag, h has one of the following forms: | | | |
| * | | | |
| * sflag = -1.0f sflag = 0.0f sflag = 1.0f sflag = -2.0f | | | |
| * | | | |
| * (sh00 sh01) (1.0f sh01) (sh00 1.0f) (1.0f 0.0f) | | | |
| * h = ( ) ( ) ( ) ( ) | | | |
| * (sh10 sh11) (sh10 1.0f) (-1.0f sh11) (0.0f 1.0f) | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * sparam 5-element vector. sparam[0] is sflag described above. sparam[1] | | | |
| * through sparam[4] contain the 2x2 rotation matrix h: sparam[1] | | | |
| * contains sh00, sparam[2] contains sh10, sparam[3] contains sh01, | | | |
| * and sparam[4] contains sh11. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated vector x (unchanged if n <= 0) | | | |
| * y rotated vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/srotm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSrotm(int n, float *x, int incx, float *y, int incy, | | | |
| const float* sparam); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSrotmg (float *host_psd1, float *host_psd2, float *host_psx1, cons | | | |
| t float *host_psy1, | | | |
| * float *host_sparam) | | | |
| * | | | |
| * constructs the modified Givens transformation matrix h which zeros | | | |
| * the second component of the 2-vector transpose(sqrt(sd1)*sx1,sqrt(sd2)*s | | | |
| y1). | | | |
| * With sparam[0] = sflag, h has one of the following forms: | | | |
| * | | | |
| * sflag = -1.0f sflag = 0.0f sflag = 1.0f sflag = -2.0f | | | |
| * | | | |
| * (sh00 sh01) (1.0f sh01) (sh00 1.0f) (1.0f 0.0f) | | | |
| * h = ( ) ( ) ( ) ( ) | | | |
| * (sh10 sh11) (sh10 1.0f) (-1.0f sh11) (0.0f 1.0f) | | | |
| * | | | |
| * sparam[1] through sparam[4] contain sh00, sh10, sh01, sh11, | | | |
| * respectively. Values of 1.0f, -1.0f, or 0.0f implied by the value | | | |
| * of sflag are not stored in sparam. | | | |
| * Note that this function is provided for completeness and run exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * sd1 single precision scalar | | | |
| * sd2 single precision scalar | | | |
| * sx1 single precision scalar | | | |
| * sy1 single precision scalar | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * sd1 changed to represent the effect of the transformation | | | |
| * sd2 changed to represent the effect of the transformation | | | |
| * sx1 changed to represent the effect of the transformation | | | |
| * sparam 5-element vector. sparam[0] is sflag described above. sparam[1] | | | |
| * through sparam[4] contain the 2x2 rotation matrix h: sparam[1] | | | |
| * contains sh00, sparam[2] contains sh10, sparam[3] contains sh01, | | | |
| * and sparam[4] contains sh11. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/srotmg.f | | | |
| * | | | |
| * This function does not set any error status. | | | |
| */ | | | |
| void CUBLASAPI cublasSrotmg (float *host_sd1, float *host_sd2, float *host_ | | | |
| sx1, | | | |
| const float *host_sy1, float* host_sparam); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSscal (int n, float alpha, float *x, int incx) | | | |
| * | | | |
| * replaces single precision vector x with single precision alpha * x. For | | | |
| i | | | |
| * = 0 to n - 1, it replaces x[ix + i * incx] with alpha * x[ix + i * incx] | | | |
| , | | | |
| * where ix = 1 if incx >= 0, else ix = 1 + (1 - n) * incx. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * alpha single precision scalar multiplier | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x single precision result (unchanged if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sscal.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSscal (int n, float alpha, float *x, int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSswap (int n, float *x, int incx, float *y, int incy) | | | |
| * | | | |
| * interchanges the single precision vector x with the single precision | | | |
| * vector y. For i = 0 to n-1, interchanges x[lx + i * incx] with | | | |
| * y[ly + i * incy], where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * | | | |
| * incx, and ly is defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x contains single precision vector y | | | |
| * y contains single precision vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sswap.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSswap (int n, float *x, int incx, float *y, int incy); | | | |
| | | | |
| /* ----------------- CUBLAS single-complex BLAS1 functions ---------------- | | | |
| - */ | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCaxpy (int n, cuComplex alpha, const cuComplex *x, int incx, | | | |
| * cuComplex *y, int incy) | | | |
| * | | | |
| * multiplies single-complex vector x by single-complex scalar alpha and ad | | | |
| ds | | | |
| * the result to single-complex vector y; that is, it overwrites single-com | | | |
| plex | | | |
| * y with single-complex alpha * x + y. For i = 0 to n - 1, it replaces | | | |
| * y[ly + i * incy] with alpha * x[lx + i * incx] + y[ly + i * incy], where | | | |
| * lx = 0 if incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in | | | |
| a | | | |
| * similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * alpha single-complex scalar multiplier | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y single-complex result (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/caxpy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCaxpy (int n, cuComplex alpha, const cuComplex *x, | | | |
| int incx, cuComplex *y, int incy); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCcopy (int n, const cuComplex *x, int incx, cuComplex *y, int incy | | | |
| ) | | | |
| * | | | |
| * copies the single-complex vector x to the single-complex vector y. For | | | |
| * i = 0 to n-1, copies x[lx + i * incx] to y[ly + i * incy], where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in a similar | | | |
| * way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y contains single complex vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ccopy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCcopy (int n, const cuComplex *x, int incx, cuComplex | | | |
| *y, | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZcopy (int n, const cuDoubleComplex *x, int incx, cuDoubleComplex | | | |
| *y, int incy) | | | |
| * | | | |
| * copies the double-complex vector x to the double-complex vector y. For | | | |
| * i = 0 to n-1, copies x[lx + i * incx] to y[ly + i * incy], where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in a similar | | | |
| * way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y contains double complex vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zcopy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZcopy (int n, const cuDoubleComplex *x, int incx, cuDo | | | |
| ubleComplex *y, | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCscal (int n, cuComplex alpha, cuComplex *x, int incx) | | | |
| * | | | |
| * replaces single-complex vector x with single-complex alpha * x. For i | | | |
| * = 0 to n - 1, it replaces x[ix + i * incx] with alpha * x[ix + i * incx] | | | |
| , | | | |
| * where ix = 1 if incx >= 0, else ix = 1 + (1 - n) * incx. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * alpha single-complex scalar multiplier | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x single-complex result (unchanged if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cscal.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCscal (int n, cuComplex alpha, cuComplex *x, int incx) | | | |
| ; | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCrotg (cuComplex *host_ca, cuComplex cb, float *host_sc, cuComplex | | | |
| *host_cs) | | | |
| * | | | |
| * constructs the complex Givens transformation | | | |
| * | | | |
| * ( sc cs ) | | | |
| * G = ( ) , sc^2 + cabs(cs)^2 = 1, | | | |
| * (-cs sc ) | | | |
| * | | | |
| * which zeros the second entry of the complex 2-vector transpose(ca, cb). | | | |
| * | | | |
| * The quantity ca/cabs(ca)*norm(ca,cb) overwrites ca in storage. The | | | |
| * function crot (n, x, incx, y, incy, sc, cs) is normally called next | | | |
| * to apply the transformation to a 2 x n matrix. | | | |
| * Note that this function is provided for completeness and run exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * ca single-precision complex precision scalar | | | |
| * cb single-precision complex scalar | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * ca single-precision complex ca/cabs(ca)*norm(ca,cb) | | | |
| * sc single-precision cosine component of rotation matrix | | | |
| * cs single-precision complex sine component of rotation matrix | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/crotg.f | | | |
| * | | | |
| * This function does not set any error status. | | | |
| */ | | | |
| __host__ void CUBLASAPI cublasCrotg (cuComplex *host_ca, cuComplex cb, floa | | | |
| t *host_sc, | | | |
| cuComplex *host_cs); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCrot (int n, cuComplex *x, int incx, cuComplex *y, int incy, float | | | |
| sc, | | | |
| * cuComplex cs) | | | |
| * | | | |
| * multiplies a 2x2 matrix ( sc cs) with the 2xn matrix ( transpose(x | | | |
| ) ) | | | |
| * (-conj(cs) sc) ( transpose(y | | | |
| ) ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 ... n - 1, where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single-precision complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-precision complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * sc single-precision cosine component of rotation matrix | | | |
| * cs single-precision complex sine component of rotation matrix | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated single-precision complex vector x (unchanged if n <= 0) | | | |
| * y rotated single-precision complex vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://netlib.org/lapack/explore-html/crot.f.html | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCrot (int n, cuComplex *x, int incx, cuComplex *y, | | | |
| int incy, float c, cuComplex s); | | | |
| | | | |
|
| /* | | static __inline__ cublasStatus CUBLASAPI cublasAlloc (int n, int elemSize, | |
| * void | | void **devicePtr) | |
| * csrot (int n, cuComplex *x, int incx, cuComplex *y, int incy, float c, | | { | |
| * float s) | | cudaError_t cudaStat; | |
| * | | unsigned long long int testSize; | |
| * multiplies a 2x2 rotation matrix ( c s) with a 2xn matrix ( transpose(x) | | size_t allocSize; | |
| ) | | *devicePtr = 0; | |
| * (-s c) ( transpose(y) | | | |
| ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 ... n - 1, where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single-precision complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-precision complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * c cosine component of rotation matrix | | | |
| * s sine component of rotation matrix | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated vector x (unchanged if n <= 0) | | | |
| * y rotated vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference http://www.netlib.org/blas/csrot.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCsrot (int n, cuComplex *x, int incx, cuComplex *y, | | | |
| int incy, float c, float s); | | | |
| | | | |
|
| /* | | if ((n <= 0) || (elemSize <= 0)) { | |
| * void | | return CUBLAS_STATUS_INVALID_VALUE; | |
| * cublasCsscal (int n, float alpha, cuComplex *x, int incx) | | } | |
| * | | allocSize = (size_t)elemSize * (size_t)n; | |
| * replaces single-complex vector x with single-complex alpha * x. For i | | testSize = (unsigned long long int)elemSize * (unsigned long long int)n | |
| * = 0 to n - 1, it replaces x[ix + i * incx] with alpha * x[ix + i * incx] | | ; | |
| , | | if (testSize != (unsigned long long int)allocSize) { /* size_t overflow | |
| * where ix = 1 if incx >= 0, else ix = 1 + (1 - n) * incx. | | ! */ | |
| * | | return CUBLAS_STATUS_ALLOC_FAILED; | |
| * Input | | } | |
| * ----- | | cudaStat = cudaMalloc (devicePtr, allocSize); | |
| * n number of elements in input vectors | | if (cudaStat != cudaSuccess) { | |
| * alpha single precision scalar multiplier | | return CUBLAS_STATUS_ALLOC_FAILED; | |
| * x single-complex vector with n elements | | } | |
| * incx storage spacing between elements of x | | return CUBLAS_STATUS_SUCCESS; | |
| * | | } | |
| * Output | | | |
| * ------ | | | |
| * x single-complex result (unchanged if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/csscal.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCsscal (int n, float alpha, cuComplex *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ cublasStatus CUBLASAPI cublasFree (void *devicePtr) | |
| * void | | { | |
| * cublasCswap (int n, const cuComplex *x, int incx, cuComplex *y, int incy | | cudaError_t cudaStat; | |
| ) | | | |
| * | | | |
| * interchanges the single-complex vector x with the single-complex vector | | | |
| y. | | | |
| * For i = 0 to n-1, interchanges x[lx + i * incx] with y[ly + i * incy], w | | | |
| here | | | |
| * lx = 1 if incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in | | | |
| a | | | |
| * similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x contains-single complex vector y | | | |
| * y contains-single complex vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cswap.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCswap (int n, cuComplex *x, int incx, cuComplex *y, | | | |
| int incy); | | | |
| | | | |
|
| /* | | if (devicePtr) { | |
| * void | | cudaStat = cudaFree (devicePtr); | |
| * cublasZswap (int n, const cuDoubleComplex *x, int incx, cuDoubleComplex | | if (cudaStat != cudaSuccess) { | |
| *y, int incy) | | /* should never fail, except when there is internal corruption* | |
| * | | / | |
| * interchanges the double-complex vector x with the double-complex vector | | return CUBLAS_STATUS_INTERNAL_ERROR; | |
| y. | | } | |
| * For i = 0 to n-1, interchanges x[lx + i * incx] with y[ly + i * incy], w | | } | |
| here | | return CUBLAS_STATUS_SUCCESS; | |
| * lx = 1 if incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in | | } | |
| a | | | |
| * similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x contains-double complex vector y | | | |
| * y contains-double complex vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zswap.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZswap (int n, cuDoubleComplex *x, int incx, cuDoubleCo | | | |
| mplex *y, | | | |
| int incy); | | | |
| | | | |
|
| /* | | static __inline__ cublasStatus CUBLASAPI cublasSetKernelStream (cudaStream_ | |
| * cuComplex | | t stream) | |
| * cdotu (int n, const cuComplex *x, int incx, const cuComplex *y, int incy | | { | |
| ) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | return cublasSetStream_v2(handle, stream); | |
| * computes the dot product of two single-complex vectors. It returns the | | } | |
| * dot product of the single-complex vectors x and y if successful, and com | | | |
| plex | | | |
| * zero otherwise. It computes the sum for i = 0 to n - 1 of x[lx + i * inc | | | |
| x] * | | | |
| * y[ly + i * incy], where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * inc | | | |
| x; | | | |
| * ly is defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns single-complex dot product (zero if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cdotu.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to execute on GPU | | | |
| */ | | | |
| cuComplex CUBLASAPI cublasCdotu (int n, const cuComplex *x, int incx, | | | |
| const cuComplex *y, int incy); | | | |
| | | | |
|
| /* | | /* Helper functions */ | |
| * cuComplex | | static __inline__ cublasOperation_t convertToOp( char trans ) | |
| * cublasCdotc (int n, const cuComplex *x, int incx, const cuComplex *y, | | { | |
| * int incy) | | switch(trans) { | |
| * | | case 'N': | |
| * computes the dot product of two single-complex vectors. It returns the | | case 'n': | |
| * dot product of the single-complex vectors x and y if successful, and com | | return CUBLAS_OP_N; | |
| plex | | case 't': | |
| * zero otherwise. It computes the sum for i = 0 to n - 1 of x[lx + i * inc | | case 'T': | |
| x] * | | return CUBLAS_OP_T; | |
| * y[ly + i * incy], where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * inc | | case 'C': | |
| x; | | case 'c': | |
| * ly is defined in a similar way using incy. | | return CUBLAS_OP_C; | |
| * | | default: | |
| * Input | | return CUBLAS_OP_N; | |
| * ----- | | } | |
| * n number of elements in input vectors | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y single-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns single-complex dot product (zero if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cdotc.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to execute on GPU | | | |
| */ | | | |
| cuComplex CUBLASAPI cublasCdotc (int n, const cuComplex *x, int incx, | | | |
| const cuComplex *y, int incy); | | | |
| | | | |
|
| /* | | } | |
| * int | | static __inline__ cublasFillMode_t convertToFillMode( char uplo ) | |
| * cublasIcamax (int n, const cuComplex *x, int incx) | | { | |
| * | | switch (uplo) { | |
| * finds the smallest index of the element having maximum absolute value | | case 'U': | |
| * in single-complex vector x; that is, the result is the first i, i = 0 | | case 'u': | |
| * to n - 1 that maximizes abs(real(x[1+i*incx]))+abs(imag(x[1 + i * incx]) | | return CUBLAS_FILL_MODE_UPPER; | |
| ). | | case 'L': | |
| * | | case 'l': | |
| * Input | | default: | |
| * ----- | | return CUBLAS_FILL_MODE_LOWER; | |
| * n number of elements in input vector | | } | |
| * x single-complex vector with n elements | | } | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/icamax.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIcamax (int n, const cuComplex *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ cublasDiagType_t convertToDiagType( char diag ) | |
| * int | | { | |
| * cublasIcamin (int n, const cuComplex *x, int incx) | | switch (diag) { | |
| * | | case 'U': | |
| * finds the smallest index of the element having minimum absolute value | | case 'u': | |
| * in single-complex vector x; that is, the result is the first i, i = 0 | | return CUBLAS_DIAG_UNIT; | |
| * to n - 1 that minimizes abs(real(x[1+i*incx]))+abs(imag(x[1 + i * incx]) | | case 'N': | |
| ). | | case 'n': | |
| * | | default: | |
| * Input | | return CUBLAS_DIAG_NON_UNIT; | |
| * ----- | | } | |
| * n number of elements in input vector | | } | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: see ICAMAX. | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIcamin (int n, const cuComplex *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ cublasSideMode_t convertToSideMode( char side ) | |
| * float | | { | |
| * cublasScasum (int n, const cuDouble *x, int incx) | | switch (side) { | |
| * | | case 'R': | |
| * takes the sum of the absolute values of a complex vector and returns a | | case 'r': | |
| * single precision result. Note that this is not the L1 norm of the vector | | return CUBLAS_SIDE_RIGHT; | |
| . | | case 'L': | |
| * The result is the sum from 0 to n-1 of abs(real(x[ix+i*incx])) + | | case 'l': | |
| * abs(imag(x(ix+i*incx))), where ix = 1 if incx <= 0, else ix = 1+(1-n)*in | | default: | |
| cx. | | return CUBLAS_SIDE_LEFT; | |
| * | | } | |
| * Input | | } | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x single-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the single precision sum of absolute values of real and imaginar | | | |
| y | | | |
| * parts (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/scasum.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| float CUBLASAPI cublasScasum (int n, const cuComplex *x, int incx); | | | |
| | | | |
|
| /* | | /* ---------------- CUBLAS BLAS1 functions ---------------- */ | |
| * float | | /* NRM2 */ | |
| * cublasScnrm2 (int n, const cuComplex *x, int incx) | | static __inline__ float CUBLASAPI cublasSnrm2 (int n, const float *x, int i | |
| * | | ncx) | |
| * computes the Euclidean norm of the single-complex n-vector x. This code | | { | |
| * uses simple scaling to avoid intermediate underflow and overflow. | | float result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Input | | cublasStatus_t error = cublasSnrm2_v2(handle, n, x, incx, &result); | |
| * ----- | | cublasSetError(error); | |
| * n number of elements in input vector | | return result; | |
| * x single-complex vector with n elements | | } | |
| * incx storage spacing between elements of x | | static __inline__ double CUBLASAPI cublasDnrm2 (int n, const double *x, int | |
| * | | incx) | |
| * Output | | { | |
| * ------ | | double result; | |
| * returns Euclidian norm (0 if n <= 0 or incx <= 0, or if an error occurs) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasDnrm2_v2(handle, n, x, incx, &result); | |
| * Reference: http://www.netlib.org/blas/scnrm2.f | | cublasSetError(error); | |
| * | | return result; | |
| * Error status for this function can be retrieved via cublasGetError(). | | } | |
| * | | static __inline__ float CUBLASAPI cublasScnrm2 (int n, const cuComplex *x, | |
| * Error Status | | int incx) | |
| * ------------ | | { | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | float result; | |
| d | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | cublasStatus_t error = cublasScnrm2_v2(handle, n, x, incx, &result); | |
| */ | | cublasSetError(error); | |
| float CUBLASAPI cublasScnrm2 (int n, const cuComplex *x, int incx); | | return result; | |
| | | } | |
| | | static __inline__ double CUBLASAPI cublasDznrm2 (int n, const cuDoubleCompl | |
| | | ex *x, int incx) | |
| | | { | |
| | | double result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDznrm2_v2(handle, n, x, incx, &result); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* DOT */ | |
| | | static __inline__ float CUBLASAPI cublasSdot (int n, const float *x, int in | |
| | | cx, const float *y, | |
| | | int incy) | |
| | | { | |
| | | float result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSdot_v2(handle, n, x, incx, y, incy, &resu | |
| | | lt); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | static __inline__ double CUBLASAPI cublasDdot (int n, const double *x, int | |
| | | incx, const double *y, | |
| | | int incy) | |
| | | { | |
| | | double result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDdot_v2(handle, n, x, incx, y, incy, &resu | |
| | | lt); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | static __inline__ cuComplex CUBLASAPI cublasCdotu (int n, const cuComplex * | |
| | | x, int incx, const cuComplex *y, | |
| | | int incy) | |
| | | { | |
| | | cuComplex result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCdotu_v2(handle, n, x, incx, y, incy, &res | |
| | | ult); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | static __inline__ cuComplex CUBLASAPI cublasCdotc (int n, const cuComplex * | |
| | | x, int incx, const cuComplex *y, | |
| | | int incy) | |
| | | { | |
| | | cuComplex result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCdotc_v2(handle, n, x, incx, y, incy, &res | |
| | | ult); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | | |
|
| /* ----------------- CUBLAS double-complex BLAS1 functions ---------------- | | static __inline__ cuDoubleComplex CUBLASAPI cublasZdotu (int n, const cuDou | |
| - */ | | bleComplex *x, int incx, const cuDoubleComplex *y, | |
| | | int incy) | |
| | | { | |
| | | cuDoubleComplex result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZdotu_v2(handle, n, x, incx, y, incy, &res | |
| | | ult); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | static __inline__ cuDoubleComplex CUBLASAPI cublasZdotc (int n, const cuDou | |
| | | bleComplex *x, int incx, const cuDoubleComplex *y, | |
| | | int incy) | |
| | | { | |
| | | cuDoubleComplex result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZdotc_v2(handle, n, x, incx, y, incy, &res | |
| | | ult); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* SCAL */ | |
| | | static __inline__ void CUBLASAPI cublasSscal (int n, float alpha, float *x, | |
| | | int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSscal_v2(handle, n, &alpha, x, incx); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDscal (int n, double alpha, double * | |
| | | x, int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDscal_v2(handle, n, &alpha, x, incx); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasCscal (int n, cuComplex alpha, cuCom | |
| | | plex *x, int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCscal_v2(handle, n, &alpha, x, incx); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZscal (int n, cuDoubleComplex alpha, | |
| | | cuDoubleComplex *x, int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZscal_v2(handle, n, &alpha, x, incx); | |
| | | cublasSetError(error); | |
| | | } | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCsscal (int n, float alpha, cuComple | |
| * void | | x *x, int incx) | |
| * cublasZaxpy (int n, cuDoubleComplex alpha, const cuDoubleComplex *x, int | | { | |
| incx, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * cuDoubleComplex *y, int incy) | | cublasStatus_t error = cublasCsscal_v2(handle, n, &alpha, x, incx); | |
| * | | cublasSetError(error); | |
| * multiplies double-complex vector x by double-complex scalar alpha and ad | | } | |
| ds | | static __inline__ void CUBLASAPI cublasZdscal (int n, double alpha, cuDoubl | |
| * the result to double-complex vector y; that is, it overwrites double-com | | eComplex *x, int incx) | |
| plex | | { | |
| * y with double-complex alpha * x + y. For i = 0 to n - 1, it replaces | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * y[ly + i * incy] with alpha * x[lx + i * incx] + y[ly + i * incy], where | | cublasStatus_t error = cublasZdscal_v2(handle, n, &alpha, x, incx); | |
| * lx = 0 if incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in | | cublasSetError(error); | |
| a | | } | |
| * similar way using incy. | | /*------------------------------------------------------------------------* | |
| * | | / | |
| * Input | | /* AXPY */ | |
| * ----- | | static __inline__ void CUBLASAPI cublasSaxpy (int n, float alpha, const flo | |
| * n number of elements in input vectors | | at *x, int incx, | |
| * alpha double-complex scalar multiplier | | float *y, int incy) | |
| * x double-complex vector with n elements | | { | |
| * incx storage spacing between elements of x | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * y double-complex vector with n elements | | cublasStatus_t error = cublasSaxpy_v2(handle, n, &alpha, x, incx, y, in | |
| * incy storage spacing between elements of y | | cy); | |
| * | | cublasSetError(error); | |
| * Output | | } | |
| * ------ | | static __inline__ void CUBLASAPI cublasDaxpy (int n, double alpha, const do | |
| * y double-complex result (unchanged if n <= 0) | | uble *x, | |
| * | | int incx, double *y, int incy) | |
| * Reference: http://www.netlib.org/blas/zaxpy.f | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Error status for this function can be retrieved via cublasGetError(). | | cublasStatus_t error = cublasDaxpy_v2(handle, n, &alpha, x, incx, y, in | |
| * | | cy); | |
| * Error Status | | cublasSetError(error); | |
| * ------------ | | } | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | static __inline__ void CUBLASAPI cublasCaxpy (int n, cuComplex alpha, const | |
| d | | cuComplex *x, | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | int incx, cuComplex *y, int incy) | |
| */ | | { | |
| void CUBLASAPI cublasZaxpy (int n, cuDoubleComplex alpha, const cuDoubleCom | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| plex *x, | | cublasStatus_t error = cublasCaxpy_v2(handle, n, &alpha, x, incx, y, in | |
| int incx, cuDoubleComplex *y, int incy); | | cy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZaxpy (int n, cuDoubleComplex alpha, | |
| | | const cuDoubleComplex *x, | |
| | | int incx, cuDoubleComplex *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZaxpy_v2(handle, n, &alpha, x, incx, y, in | |
| | | cy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* COPY */ | |
| | | static __inline__ void CUBLASAPI cublasScopy (int n, const float *x, int in | |
| | | cx, float *y, | |
| | | int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasScopy_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDcopy (int n, const double *x, int i | |
| | | ncx, double *y, | |
| | | int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDcopy_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasCcopy (int n, const cuComplex *x, in | |
| | | t incx, cuComplex *y, | |
| | | int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCcopy_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZcopy (int n, const cuDoubleComplex | |
| | | *x, int incx, cuDoubleComplex *y, | |
| | | int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZcopy_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* SWAP */ | |
| | | static __inline__ void CUBLASAPI cublasSswap (int n, float *x, int incx, fl | |
| | | oat *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSswap_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDswap (int n, double *x, int incx, d | |
| | | ouble *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDswap_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasCswap (int n, cuComplex *x, int incx | |
| | | , cuComplex *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCswap_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZswap (int n, cuDoubleComplex *x, in | |
| | | t incx, cuDoubleComplex *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZswap_v2(handle,n,x,incx,y,incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* AMAX */ | |
| | | static __inline__ int CUBLASAPI cublasIsamax (int n, const float *x, int in | |
| | | cx) | |
| | | { | |
| | | int result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasIsamax_v2(handle, n, x, incx, &result); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | | |
|
| /* | | static __inline__ int CUBLASAPI cublasIdamax (int n, const double *x, int i | |
| * cuDoubleComplex | | ncx) | |
| * zdotu (int n, const cuDoubleComplex *x, int incx, const cuDoubleComplex | | { | |
| *y, int incy) | | int result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * computes the dot product of two double-complex vectors. It returns the | | cublasStatus_t error = cublasIdamax_v2(handle, n, x, incx, &result); | |
| * dot product of the double-complex vectors x and y if successful, and dou | | cublasSetError(error); | |
| ble-complex | | return result; | |
| * zero otherwise. It computes the sum for i = 0 to n - 1 of x[lx + i * inc | | } | |
| x] * | | | |
| * y[ly + i * incy], where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * inc | | | |
| x; | | | |
| * ly is defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns double-complex dot product (zero if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zdotu.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has nor been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to execute on GPU | | | |
| */ | | | |
| cuDoubleComplex CUBLASAPI cublasZdotu (int n, const cuDoubleComplex *x, int | | | |
| incx, | | | |
| const cuDoubleComplex *y, int incy); | | | |
| | | | |
|
| /* | | static __inline__ int CUBLASAPI cublasIcamax (int n, const cuComplex *x, in | |
| * cuDoubleComplex | | t incx) | |
| * cublasZdotc (int n, const cuDoubleComplex *x, int incx, const cuDoubleCo | | { | |
| mplex *y, int incy) | | int result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * computes the dot product of two double-precision complex vectors. It ret | | cublasStatus_t error = cublasIcamax_v2(handle, n, x, incx, &result); | |
| urns the | | cublasSetError(error); | |
| * dot product of the double-precision complex vectors conjugate(x) and y i | | return result; | |
| f successful, | | | |
| * and double-precision complex zero otherwise. It computes the | | | |
| * sum for i = 0 to n - 1 of conjugate(x[lx + i * incx]) * y[ly + i * incy | | | |
| ], | | | |
| * where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * incx; | | | |
| * ly is defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns double-complex dot product (zero if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zdotc.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has nor been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to execute on GPU | | | |
| */ | | | |
| cuDoubleComplex CUBLASAPI cublasZdotc( int n, const cuDoubleComplex *x, int | | | |
| incx, | | | |
| const cuDoubleComplex *y, int incy ) | | | |
| ; | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasZscal (int n, cuComplex alpha, cuComplex *x, int incx) | | | |
| * | | | |
| * replaces double-complex vector x with double-complex alpha * x. For i | | | |
| * = 0 to n - 1, it replaces x[ix + i * incx] with alpha * x[ix + i * incx] | | | |
| , | | | |
| * where ix = 1 if incx >= 0, else ix = 1 + (1 - n) * incx. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * alpha double-complex scalar multiplier | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x double-complex result (unchanged if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zscal.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZscal (int n, cuDoubleComplex alpha, cuDoubleComplex * | | | |
| x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ int CUBLASAPI cublasIzamax (int n, const cuDoubleComplex | |
| * void | | *x, int incx) | |
| * cublasZdscal (int n, double alpha, cuDoubleComplex *x, int incx) | | { | |
| * | | int result; | |
| * replaces double-complex vector x with double-complex alpha * x. For i | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * = 0 to n - 1, it replaces x[ix + i * incx] with alpha * x[ix + i * incx] | | cublasStatus_t error = cublasIzamax_v2(handle, n, x, incx, &result); | |
| , | | cublasSetError(error); | |
| * where ix = 1 if incx >= 0, else ix = 1 + (1 - n) * incx. | | return result; | |
| * | | } | |
| * Input | | /*------------------------------------------------------------------------* | |
| * ----- | | / | |
| * n number of elements in input vectors | | /* AMIN */ | |
| * alpha double precision scalar multiplier | | static __inline__ int CUBLASAPI cublasIsamin (int n, const float *x, int in | |
| * x double-complex vector with n elements | | cx) | |
| * incx storage spacing between elements of x | | { | |
| * | | int result; | |
| * Output | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * ------ | | cublasStatus_t error = cublasIsamin_v2(handle, n, x, incx, &result); | |
| * x double-complex result (unchanged if n <= 0 or incx <= 0) | | cublasSetError(error); | |
| * | | return result; | |
| * Reference: http://www.netlib.org/blas/zdscal.f | | } | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZdscal (int n, double alpha, cuDoubleComplex *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | static __inline__ int CUBLASAPI cublasIdamin (int n, const double *x, int i | |
| * double | | ncx) | |
| * cublasDznrm2 (int n, const cuDoubleComplex *x, int incx) | | { | |
| * | | int result; | |
| * computes the Euclidean norm of the double precision complex n-vector x. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| This code | | cublasStatus_t error = cublasIdamin_v2(handle, n, x, incx, &result); | |
| * uses simple scaling to avoid intermediate underflow and overflow. | | cublasSetError(error); | |
| * | | return result; | |
| * Input | | } | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns Euclidian norm (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dznrm2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| double CUBLASAPI cublasDznrm2 (int n, const cuDoubleComplex *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ int CUBLASAPI cublasIcamin (int n, const cuComplex *x, in | |
| * void | | t incx) | |
| * cublasZrotg (cuDoubleComplex *host_ca, cuDoubleComplex cb, double *host_ | | { | |
| sc, double *host_cs) | | int result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * constructs the complex Givens tranformation | | cublasStatus_t error = cublasIcamin_v2(handle, n, x, incx, &result); | |
| * | | cublasSetError(error); | |
| * ( sc cs ) | | return result; | |
| * G = ( ) , sc^2 + cabs(cs)^2 = 1, | | } | |
| * (-cs sc ) | | | |
| * | | | |
| * which zeros the second entry of the complex 2-vector transpose(ca, cb). | | | |
| * | | | |
| * The quantity ca/cabs(ca)*norm(ca,cb) overwrites ca in storage. The | | | |
| * function crot (n, x, incx, y, incy, sc, cs) is normally called next | | | |
| * to apply the transformation to a 2 x n matrix. | | | |
| * Note that this function is provided for completeness and run exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * ca double-precision complex precision scalar | | | |
| * cb double-precision complex scalar | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * ca double-precision complex ca/cabs(ca)*norm(ca,cb) | | | |
| * sc double-precision cosine component of rotation matrix | | | |
| * cs double-precision complex sine component of rotation matrix | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zrotg.f | | | |
| * | | | |
| * This function does not set any error status. | | | |
| */ | | | |
| void CUBLASAPI cublasZrotg (cuDoubleComplex *host_ca, cuDoubleComplex cb, d | | | |
| ouble *host_sc, | | | |
| cuDoubleComplex *host_cs); | | | |
| | | | |
|
| /* | | static __inline__ int CUBLASAPI cublasIzamin (int n, const cuDoubleComplex | |
| * cublasZrot (int n, cuDoubleComplex *x, int incx, cuDoubleComplex *y, int | | *x, int incx) | |
| incy, double sc, | | { | |
| * cuDoubleComplex cs) | | int result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * multiplies a 2x2 matrix ( sc cs) with the 2xn matrix ( transpose(x | | cublasStatus_t error = cublasIzamin_v2(handle, n, x, incx, &result); | |
| ) ) | | cublasSetError(error); | |
| * (-conj(cs) sc) ( transpose(y | | return result; | |
| ) ) | | } | |
| * | | /*------------------------------------------------------------------------* | |
| * The elements of x are in x[lx + i * incx], i = 0 ... n - 1, where lx = 1 | | / | |
| if | | /* ASUM */ | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | static __inline__ float CUBLASAPI cublasSasum (int n, const float *x, int i | |
| d | | ncx) | |
| * incy. | | { | |
| * | | float result; | |
| * Input | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * ----- | | cublasStatus_t error = cublasSasum_v2(handle, n, x, incx, &result); | |
| * n number of elements in input vectors | | cublasSetError(error); | |
| * x double-precision complex vector with n elements | | return result; | |
| * incx storage spacing between elements of x | | } | |
| * y double-precision complex vector with n elements | | static __inline__ double CUBLASAPI cublasDasum (int n, const double *x, int | |
| * incy storage spacing between elements of y | | incx) | |
| * sc double-precision cosine component of rotation matrix | | { | |
| * cs double-precision complex sine component of rotation matrix | | double result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Output | | cublasStatus_t error = cublasDasum_v2(handle, n, x, incx, &result); | |
| * ------ | | cublasSetError(error); | |
| * x rotated double-precision complex vector x (unchanged if n <= 0) | | return result; | |
| * y rotated double-precision complex vector y (unchanged if n <= 0) | | } | |
| * | | static __inline__ float CUBLASAPI cublasScasum (int n, const cuComplex *x, | |
| * Reference: http://netlib.org/lapack/explore-html/zrot.f.html | | int incx) | |
| * | | { | |
| * Error status for this function can be retrieved via cublasGetError(). | | float result; | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Error Status | | cublasStatus_t error = cublasScasum_v2(handle, n, x, incx, &result); | |
| * ------------ | | cublasSetError(error); | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | return result; | |
| d | | } | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | static __inline__ double CUBLASAPI cublasDzasum (int n, const cuDoubleCompl | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | ex *x, int incx) | |
| */ | | { | |
| void CUBLASAPI cublasZrot (int n, cuDoubleComplex *x, int incx, | | double result; | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDzasum_v2(handle, n, x, incx, &result); | |
| | | cublasSetError(error); | |
| | | return result; | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* ROT */ | |
| | | static __inline__ void CUBLASAPI cublasSrot (int n, float *x, int incx, flo | |
| | | at *y, int incy, | |
| | | float sc, float ss) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSrot_v2(handle, n, x, incx, y, incy, &sc, | |
| | | &ss); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDrot (int n, double *x, int incx, do | |
| | | uble *y, int incy, | |
| | | double sc, double ss) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDrot_v2(handle, n, x, incx, y, incy, &sc, | |
| | | &ss); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasCrot (int n, cuComplex *x, int incx, | |
| | | cuComplex *y, | |
| | | int incy, float c, cuComplex s) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCrot_v2(handle, n, x, incx, y, incy, &c, & | |
| | | s); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZrot (int n, cuDoubleComplex *x, int | |
| | | incx, | |
| cuDoubleComplex *y, int incy, double sc, | | cuDoubleComplex *y, int incy, double sc, | |
|
| cuDoubleComplex cs); | | cuDoubleComplex cs) | |
| | | { | |
| /* | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * void | | cublasStatus_t error = cublasZrot_v2(handle, n, x, incx, y, incy, &sc, | |
| * zdrot (int n, cuDoubleComplex *x, int incx, cuCumplex *y, int incy, doub | | &cs); | |
| le c, | | cublasSetError(error); | |
| * double s) | | } | |
| * | | | |
| * multiplies a 2x2 matrix ( c s) with the 2xn matrix ( transpose(x) ) | | | |
| * (-s c) ( transpose(y) ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 ... n - 1, where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision complex vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * c cosine component of rotation matrix | | | |
| * s sine component of rotation matrix | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated vector x (unchanged if n <= 0) | | | |
| * y rotated vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference http://www.netlib.org/blas/zdrot.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZdrot (int n, cuDoubleComplex *x, int incx, | | | |
| cuDoubleComplex *y, int incy, double c, double | | | |
| s); | | | |
| | | | |
| /* | | | |
| * int | | | |
| * cublasIzamax (int n, const double *x, int incx) | | | |
| * | | | |
| * finds the smallest index of the element having maximum absolute value | | | |
| * in double-complex vector x; that is, the result is the first i, i = 0 | | | |
| * to n - 1 that maximizes abs(real(x[1+i*incx]))+abs(imag(x[1 + i * incx]) | | | |
| ). | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/izamax.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIzamax (int n, const cuDoubleComplex *x, int incx); | | | |
| | | | |
| /* | | | |
| * int | | | |
| * cublasIzamin (int n, const cuDoubleComplex *x, int incx) | | | |
| * | | | |
| * finds the smallest index of the element having minimum absolute value | | | |
| * in double-complex vector x; that is, the result is the first i, i = 0 | | | |
| * to n - 1 that minimizes abs(real(x[1+i*incx]))+abs(imag(x[1 + i * incx]) | | | |
| ). | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: Analogous to IZAMAX, see there. | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIzamin (int n, const cuDoubleComplex *x, int incx); | | | |
| | | | |
| /* | | | |
| * double | | | |
| * cublasDzasum (int n, const cuDoubleComplex *x, int incx) | | | |
| * | | | |
| * takes the sum of the absolute values of a complex vector and returns a | | | |
| * double precision result. Note that this is not the L1 norm of the vector | | | |
| . | | | |
| * The result is the sum from 0 to n-1 of abs(real(x[ix+i*incx])) + | | | |
| * abs(imag(x(ix+i*incx))), where ix = 1 if incx <= 0, else ix = 1+(1-n)*in | | | |
| cx. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x double-complex vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the double precision sum of absolute values of real and imaginar | | | |
| y | | | |
| * parts (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dzasum.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| double CUBLASAPI cublasDzasum (int n, const cuDoubleComplex *x, int incx); | | | |
| | | | |
| /* --------------- CUBLAS single precision BLAS2 functions --------------- | | | |
| - */ | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSgbmv (char trans, int m, int n, int kl, int ku, float alpha, | | | |
| * const float *A, int lda, const float *x, int incx, float be | | | |
| ta, | | | |
| * float *y, int incy) | | | |
| * | | | |
| * performs one of the matrix-vector operations | | | |
| * | | | |
| * y = alpha*op(A)*x + beta*y, op(A)=A or op(A) = transpose(A) | | | |
| * | | | |
| * alpha and beta are single precision scalars. x and y are single precisio | | | |
| n | | | |
| * vectors. A is an m by n band matrix consisting of single precision eleme | | | |
| nts | | | |
| * with kl sub-diagonals and ku super-diagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * kl specifies the number of sub-diagonals of matrix A. It must be at | | | |
| * least zero. | | | |
| * ku specifies the number of super-diagonals of matrix A. It must be a | | | |
| t | | | |
| * least zero. | | | |
| * alpha single precision scalar multiplier applied to op(A). | | | |
| * A single precision array of dimensions (lda, n). The leading | | | |
| * (kl + ku + 1) x n part of the array A must contain the band matri | | | |
| x A, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x | | | |
| * in row (ku + 1) of the array, the first super-diagonal starting a | | | |
| t | | | |
| * position 2 in row ku, the first sub-diagonal starting at position | | | |
| 1 | | | |
| * in row (ku + 2), and so on. Elements in the array A that do not | | | |
| * correspond to elements in the band matrix (such as the top left | | | |
| * ku x ku triangle) are not referenced. | | | |
| * lda leading dimension of A. lda must be at least (kl + ku + 1). | | | |
| * x single precision array of length at least (1+(n-1)*abs(incx)) whe | | | |
| n | | | |
| * trans == 'N' or 'n' and at least (1+(m-1)*abs(incx)) otherwise. | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta single precision scalar multiplier applied to vector y. If beta i | | | |
| s | | | |
| * zero, y is not read. | | | |
| * y single precision array of length at least (1+(m-1)*abs(incy)) whe | | | |
| n | | | |
| * trans == 'N' or 'n' and at least (1+(n-1)*abs(incy)) otherwise. I | | | |
| f | | | |
| * beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*op(A)*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sgbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n, kl, or ku < 0; if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSgbmv (char trans, int m, int n, int kl, int ku, | | | |
| float alpha, const float *A, int lda, | | | |
| const float *x, int incx, float beta, float *y, | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * cublasSgemv (char trans, int m, int n, float alpha, const float *A, int | | | |
| lda, | | | |
| * const float *x, int incx, float beta, float *y, int incy) | | | |
| * | | | |
| * performs one of the matrix-vector operations | | | |
| * | | | |
| * y = alpha * op(A) * x + beta * y, | | | |
| * | | | |
| * where op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) | | | |
| * | | | |
| * where alpha and beta are single precision scalars, x and y are single | | | |
| * precision vectors, and A is an m x n matrix consisting of single precisi | | | |
| on | | | |
| * elements. Matrix A is stored in column major format, and lda is the lead | | | |
| ing | | | |
| * dimension of the two-dimensional array in which A is stored. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If trans = | | | |
| * trans = 't', 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * alpha single precision scalar multiplier applied to op(A). | | | |
| * A single precision array of dimensions (lda, n) if trans = 'n' or | | | |
| * 'N'), and of dimensions (lda, m) otherwise. lda must be at least | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )) | | | |
| * when trans = 'N' or 'n' and at least (1 + (m - 1) * abs(incx)) | | | |
| * otherwise. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * beta single precision scalar multiplier applied to vector y. If beta | | | |
| * is zero, y is not read. | | | |
| * y single precision array of length at least (1 + (m - 1) * abs(incy | | | |
| )) | | | |
| * when trans = 'N' or 'n' and at least (1 + (n - 1) * abs(incy)) | | | |
| * otherwise. | | | |
| * incy specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha * op(A) * x + beta * y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sgemv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0, or if incx or incy == | | | |
| 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSgemv (char trans, int m, int n, float alpha, | | | |
| const float *A, int lda, const float *x, int in | | | |
| cx, | | | |
| float beta, float *y, int incy); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCsrot (int n, cuComplex *x, int incx | |
| * cublasSger (int m, int n, float alpha, const float *x, int incx, | | , cuComplex *y, | |
| * const float *y, int incy, float *A, int lda) | | int incy, float c, float s) | |
| * | | { | |
| * performs the symmetric rank 1 operation | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasCsrot_v2(handle, n, x, incx, y, incy, &c, | |
| * A = alpha * x * transpose(y) + A, | | &s); | |
| * | | cublasSetError(error); | |
| * where alpha is a single precision scalar, x is an m element single | | } | |
| * precision vector, y is an n element single precision vector, and A | | static __inline__ void CUBLASAPI cublasZdrot (int n, cuDoubleComplex *x, in | |
| * is an m by n matrix consisting of single precision elements. Matrix A | | t incx, | |
| * is stored in column major format, and lda is the leading dimension of | | cuDoubleComplex *y, int incy, double c, double | |
| * the two-dimensional array used to store A. | | s) | |
| * | | { | |
| * Input | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * ----- | | cublasStatus_t error = cublasZdrot_v2(handle, n, x, incx, y, incy, &c, | |
| * m specifies the number of rows of the matrix A. It must be at least | | &s); | |
| * zero. | | cublasSetError(error); | |
| * n specifies the number of columns of the matrix A. It must be at | | } | |
| * least zero. | | /*------------------------------------------------------------------------* | |
| * alpha single precision scalar multiplier applied to x * transpose(y) | | / | |
| * x single precision array of length at least (1 + (m - 1) * abs(incx | | /* ROTG */ | |
| )) | | static __inline__ void CUBLASAPI cublasSrotg (float *sa, float *sb, float * | |
| * incx specifies the storage spacing between elements of x. incx must no | | sc, float *ss) | |
| t | | { | |
| * be zero. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * y single precision array of length at least (1 + (n - 1) * abs(incy | | cublasStatus_t error = cublasSrotg_v2(handle, sa, sb, sc, ss); | |
| )) | | cublasSetError(error); | |
| * incy specifies the storage spacing between elements of y. incy must no | | } | |
| t | | static __inline__ void CUBLASAPI cublasDrotg (double *sa, double *sb, doubl | |
| * be zero. | | e *sc, double *ss) | |
| * A single precision array of dimensions (lda, n). | | { | |
| * lda leading dimension of two-dimensional array used to store matrix A | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasDrotg_v2(handle, sa, sb, sc, ss); | |
| * Output | | cublasSetError(error); | |
| * ------ | | } | |
| * A updated according to A = alpha * x * transpose(y) + A | | static __inline__ void CUBLASAPI cublasCrotg (cuComplex *ca, cuComplex cb, | |
| * | | float *sc, | |
| * Reference: http://www.netlib.org/blas/sger.f | | cuComplex *cs) | |
| * | | { | |
| * Error status for this function can be retrieved via cublasGetError(). | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasCrotg_v2(handle, ca, &cb, sc, cs); | |
| * Error Status | | cublasSetError(error); | |
| * ------------ | | } | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | static __inline__ void CUBLASAPI cublasZrotg (cuDoubleComplex *ca, cuDouble | |
| d | | Complex cb, double *sc, | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | cuDoubleComplex *cs) | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | { | |
| */ | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| void CUBLASAPI cublasSger (int m, int n, float alpha, const float *x, int i | | cublasStatus_t error = cublasZrotg_v2(handle, ca, &cb, sc, cs); | |
| ncx, | | cublasSetError(error); | |
| const float *y, int incy, float *A, int lda); | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* ROTM */ | |
| | | static __inline__ void CUBLASAPI cublasSrotm(int n, float *x, int incx, flo | |
| | | at *y, int incy, | |
| | | const float* sparam) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSrotm_v2(handle, n, x, incx, y, incy, spar | |
| | | am); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDrotm(int n, double *x, int incx, do | |
| | | uble *y, int incy, | |
| | | const double* sparam) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDrotm_v2(handle, n, x, incx, y, incy, spar | |
| | | am); | |
| | | cublasSetError(error); | |
| | | } | |
| | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* ROTMG */ | |
| | | static __inline__ void CUBLASAPI cublasSrotmg (float *sd1, float *sd2, floa | |
| | | t *sx1, | |
| | | const float *sy1, float* sparam) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSrotmg_v2(handle, sd1, sd2, sx1, sy1, spar | |
| | | am); | |
| | | cublasSetError(error); | |
| | | } | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasDrotmg (double *sd1, double *sd2, do | |
| * void | | uble *sx1, | |
| * cublasSsbmv (char uplo, int n, int k, float alpha, const float *A, int l | | const double *sy1, double* sparam) | |
| da, | | { | |
| * const float *x, int incx, float beta, float *y, int incy) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasDrotmg_v2(handle, sd1, sd2, sx1, sy1, spar | |
| * performs the matrix-vector operation | | am); | |
| * | | cublasSetError(error); | |
| * y := alpha*A*x + beta*y | | } | |
| * | | | |
| * alpha and beta are single precision scalars. x and y are single precisio | | | |
| n | | | |
| * vectors with n elements. A is an n x n symmetric band matrix consisting | | | |
| * of single precision elements, with k super-diagonals and the same number | | | |
| * of sub-diagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the symme | | | |
| tric | | | |
| * band matrix A is being supplied. If uplo == 'U' or 'u', the upper | | | |
| * triangular part is being supplied. If uplo == 'L' or 'l', the low | | | |
| er | | | |
| * triangular part is being supplied. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * symmetric matrix A. n must be at least zero. | | | |
| * k specifies the number of super-diagonals of matrix A. Since the ma | | | |
| trix | | | |
| * is symmetric, this is also the number of sub-diagonals. k must be | | | |
| at | | | |
| * least zero. | | | |
| * alpha single precision scalar multiplier applied to A*x. | | | |
| * A single precision array of dimensions (lda, n). When uplo == 'U' o | | | |
| r | | | |
| * 'u', the leading (k + 1) x n part of array A must contain the upp | | | |
| er | | | |
| * triangular band of the symmetric matrix, supplied column by colum | | | |
| n, | | | |
| * with the leading diagonal of the matrix in row (k+1) of the array | | | |
| , | | | |
| * the first super-diagonal starting at position 2 in row k, and so | | | |
| on. | | | |
| * The top left k x k triangle of the array A is not referenced. Whe | | | |
| n | | | |
| * uplo == 'L' or 'l', the leading (k + 1) x n part of the array A m | | | |
| ust | | | |
| * contain the lower triangular band part of the symmetric matrix, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x in | | | |
| * row 1 of the array, the first sub-diagonal starting at position 1 | | | |
| in | | | |
| * row 2, and so on. The bottom right k x k triangle of the array A | | | |
| is | | | |
| * not referenced. | | | |
| * lda leading dimension of A. lda must be at least (k + 1). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta single precision scalar multiplier applied to vector y. If beta i | | | |
| s | | | |
| * zero, y is not read. | | | |
| * y single precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_INVALID_VALUE if k or n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| | | | |
|
| void CUBLASAPI cublasSsbmv (char uplo, int n, int k, float alpha, | | /* --------------- CUBLAS BLAS2 functions ---------------- */ | |
| | | /* GEMV */ | |
| | | static __inline__ void CUBLASAPI cublasSgemv (char trans, int m, int n, flo | |
| | | at alpha, | |
| const float *A, int lda, const float *x, int in
cx, | | const float *A, int lda, const float *x, int in
cx, | |
|
| float beta, float *y, int incy); | | float beta, float *y, int incy) | |
| /* | | { | |
| * void | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * cublasSspmv (char uplo, int n, float alpha, const float *AP, const float | | cublasStatus_t error = cublasSgemv_v2(handle, convertToOp(trans), m,n, | |
| *x, | | &alpha, A, lda, x, incx, &beta, y, incy | |
| * int incx, float beta, float *y, int incy) | | ); | |
| * | | cublasSetError(error); | |
| * performs the matrix-vector operation | | } | |
| * | | static __inline__ void CUBLASAPI cublasDgemv (char trans, int m, int n, dou | |
| * y = alpha * A * x + beta * y | | ble alpha, | |
| * | | const double *A, int lda, const double *x, int | |
| * Alpha and beta are single precision scalars, and x and y are single | | incx, | |
| * precision vectors with n elements. A is a symmetric n x n matrix | | double beta, double *y, int incy) | |
| * consisting of single precision elements that is supplied in packed form. | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Input | | cublasStatus_t error = cublasDgemv_v2(handle, convertToOp(trans), m,n, | |
| * ----- | | &alpha, A, lda, x, incx, &beta, y, incy | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | ); | |
| ower | | cublasSetError(error); | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | } | |
| r | | static __inline__ void CUBLASAPI cublasCgemv (char trans, int m, int n, cuC | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | omplex alpha, | |
| en | | const cuComplex *A, int lda, const cuComplex *x | |
| * the lower triangular part of A is supplied in AP. | | , int incx, | |
| * n specifies the number of rows and columns of the matrix A. It must | | cuComplex beta, cuComplex *y, int incy) | |
| be | | { | |
| * at least zero. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * alpha single precision scalar multiplier applied to A*x. | | cublasStatus_t error = cublasCgemv_v2(handle, convertToOp(trans), m,n, | |
| * AP single precision array with at least ((n * (n + 1)) / 2) elements | | &alpha, A, lda, x, incx, &beta, y, incy | |
| . If | | ); | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | cublasSetError(error); | |
| rt | | } | |
| * of the symmetric matrix A, packed sequentially, column by column; | | static __inline__ void CUBLASAPI cublasZgemv (char trans, int m, int n, cuD | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | oubleComplex alpha, | |
| f | | const cuDoubleComplex *A, int lda, const cuDoub | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | leComplex *x, int incx, | |
| rt | | cuDoubleComplex beta, cuDoubleComplex *y, int i | |
| * of the symmetric matrix A, packed sequentially, column by column; | | ncy) | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | { | |
| ]. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | cublasStatus_t error = cublasZgemv_v2(handle, convertToOp(trans), m,n, | |
| )). | | &alpha, A, lda, x, incx, &beta, y, incy | |
| * incx storage spacing between elements of x. incx must not be zero. | | ); | |
| * beta single precision scalar multiplier applied to vector y; | | cublasSetError(error); | |
| * y single precision array of length at least (1 + (n - 1) * abs(incy | | } | |
| )). | | /*------------------------------------------------------------------------* | |
| * If beta is zero, y is not read. | | / | |
| * incy storage spacing between elements of y. incy must not be zero. | | /* GBMV */ | |
| * | | static __inline__ void CUBLASAPI cublasSgbmv (char trans, int m, int n, int | |
| * Output | | kl, int ku, | |
| * ------ | | float alpha, const float *A, int lda, | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sspmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSspmv (char uplo, int n, float alpha, const float *AP, | | | |
| const float *x, int incx, float beta, float *y, | | const float *x, int incx, float beta, float *y, | |
|
| int incy); | | int incy) | |
| | | { | |
| /* | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * void | | cublasStatus_t error = cublasSgbmv_v2(handle, convertToOp(trans), m,n,k | |
| * cublasSspr (char uplo, int n, float alpha, const float *x, int incx, | | l,ku, | |
| * float *AP) | | &alpha, A, lda, x, incx, &beta, y, incy | |
| * | | ); | |
| * performs the symmetric rank 1 operation | | cublasSetError(error); | |
| * | | } | |
| * A = alpha * x * transpose(x) + A, | | static __inline__ void CUBLASAPI cublasDgbmv (char trans, int m, int n, int | |
| * | | kl, int ku, | |
| * where alpha is a single precision scalar and x is an n element single | | double alpha, const double *A, int lda, | |
| * precision vector. A is a symmetric n x n matrix consisting of single | | const double *x, int incx, double beta, double | |
| * precision elements that is supplied in packed form. | | *y, | |
| * | | int incy) | |
| * Input | | { | |
| * ----- | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | cublasStatus_t error = cublasDgbmv_v2(handle, convertToOp(trans), m,n,k | |
| ower | | l,ku, | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | &alpha, A, lda, x, incx, &beta, y, incy | |
| r | | ); | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | cublasSetError(error); | |
| en | | } | |
| * the lower triangular part of A is supplied in AP. | | static __inline__ void CUBLASAPI cublasCgbmv (char trans, int m, int n, int | |
| * n specifies the number of rows and columns of the matrix A. It must | | kl, int ku, | |
| be | | cuComplex alpha, const cuComplex *A, int lda, | |
| * at least zero. | | const cuComplex *x, int incx, cuComplex beta, c | |
| * alpha single precision scalar multiplier applied to x * transpose(x). | | uComplex *y, | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | int incy) | |
| )). | | { | |
| * incx storage spacing between elements of x. incx must not be zero. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * AP single precision array with at least ((n * (n + 1)) / 2) elements | | cublasStatus_t error = cublasCgbmv_v2(handle, convertToOp(trans), m,n,k | |
| . If | | l,ku, | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | &alpha, A, lda, x, incx, &beta, y, incy | |
| rt | | ); | |
| * of the symmetric matrix A, packed sequentially, column by column; | | cublasSetError(error); | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | } | |
| f | | static __inline__ void CUBLASAPI cublasZgbmv (char trans, int m, int n, int | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | kl, int ku, | |
| rt | | cuDoubleComplex alpha, const cuDoubleComplex *A | |
| * of the symmetric matrix A, packed sequentially, column by column; | | , int lda, | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | const cuDoubleComplex *x, int incx, cuDoubleCom | |
| ]. | | plex beta, cuDoubleComplex *y, | |
| * | | int incy) | |
| * Output | | { | |
| * ------ | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * A updated according to A = alpha * x * transpose(x) + A | | cublasStatus_t error = cublasZgbmv_v2(handle, convertToOp(trans), m,n,k | |
| * | | l,ku, | |
| * Reference: http://www.netlib.org/blas/sspr.f | | &alpha, A, lda, x, incx, &beta, y, incy | |
| * | | ); | |
| * Error status for this function can be retrieved via cublasGetError(). | | cublasSetError(error); | |
| * | | } | |
| * Error Status | | /*------------------------------------------------------------------------* | |
| * ------------ | | / | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | /* TRMV */ | |
| d | | static __inline__ void CUBLASAPI cublasStrmv (char uplo, char trans, char d | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | iag, int n, | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | const float *A, int lda, float *x, int incx) | |
| */ | | { | |
| void CUBLASAPI cublasSspr (char uplo, int n, float alpha, const float *x, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| int incx, float *AP); | | cublasStatus_t error = cublasStrmv_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| /* | | convertToDiagType(diag), n, A, lda, x, i | |
| * void | | ncx ); | |
| * cublasSspr2 (char uplo, int n, float alpha, const float *x, int incx, | | cublasSetError(error); | |
| * const float *y, int incy, float *AP) | | } | |
| * | | static __inline__ void CUBLASAPI cublasDtrmv (char uplo, char trans, char d | |
| * performs the symmetric rank 2 operation | | iag, int n, | |
| * | | const double *A, int lda, double *x, int incx) | |
| * A = alpha*x*transpose(y) + alpha*y*transpose(x) + A, | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * where alpha is a single precision scalar, and x and y are n element sing | | cublasStatus_t error = cublasDtrmv_v2(handle, convertToFillMode(uplo), | |
| le | | convertToOp(trans), | |
| * precision vectors. A is a symmetric n x n matrix consisting of single | | convertToDiagType(diag), n, A, lda, x, i | |
| * precision elements that is supplied in packed form. | | ncx ); | |
| * | | cublasSetError(error); | |
| * Input | | } | |
| * ----- | | static __inline__ void CUBLASAPI cublasCtrmv (char uplo, char trans, char d | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | iag, int n, | |
| ower | | const cuComplex *A, int lda, cuComplex *x, int | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | incx) | |
| * upper triangular part of A may be referenced and the lower triang | | { | |
| ular | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | cublasStatus_t error = cublasCtrmv_v2(handle, convertToFillMode(uplo), | |
| * triangular part of A may be referenced and the upper triangular p | | convertToOp(trans), | |
| art | | convertToDiagType(diag), n, A, lda, x, i | |
| * of A is inferred. | | ncx ); | |
| * n specifies the number of rows and columns of the matrix A. It must | | cublasSetError(error); | |
| be | | } | |
| * at least zero. | | static __inline__ void CUBLASAPI cublasZtrmv (char uplo, char trans, char d | |
| * alpha single precision scalar multiplier applied to x * transpose(y) + | | iag, int n, | |
| * y * transpose(x). | | const cuDoubleComplex *A, int lda, cuDoubleComp | |
| * x single precision array of length at least (1 + (n - 1) * abs (inc | | lex *x, int incx) | |
| x)). | | { | |
| * incx storage spacing between elements of x. incx must not be zero. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * y single precision array of length at least (1 + (n - 1) * abs (inc | | cublasStatus_t error = cublasZtrmv_v2(handle, convertToFillMode(uplo), | |
| y)). | | convertToOp(trans), | |
| * incy storage spacing between elements of y. incy must not be zero. | | convertToDiagType(diag), n, A, lda, x, i | |
| * AP single precision array with at least ((n * (n + 1)) / 2) elements | | ncx ); | |
| . If | | cublasSetError(error); | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | } | |
| rt | | /*------------------------------------------------------------------------* | |
| * of the symmetric matrix A, packed sequentially, column by column; | | / | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | /* TBMV */ | |
| f | | static __inline__ void CUBLASAPI cublasStbmv (char uplo, char trans, char d | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | iag, int n, int k, | |
| rt | | const float *A, int lda, float *x, int incx) | |
| * of the symmetric matrix A, packed sequentially, column by column; | | { | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| ]. | | cublasStatus_t error = cublasStbmv_v2(handle, convertToFillMode(uplo), | |
| * | | convertToOp(trans), | |
| * Output | | convertToDiagType(diag), n, k, A, lda, x | |
| * ------ | | , incx ); | |
| * A updated according to A = alpha*x*transpose(y)+alpha*y*transpose(x | | cublasSetError(error); | |
| )+A | | } | |
| * | | static __inline__ void CUBLASAPI cublasDtbmv (char uplo, char trans, char d | |
| * Reference: http://www.netlib.org/blas/sspr2.f | | iag, int n, int k, | |
| * | | const double *A, int lda, double *x, int incx) | |
| * Error status for this function can be retrieved via cublasGetError(). | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Error Status | | cublasStatus_t error = cublasDtbmv_v2(handle, convertToFillMode(uplo), | |
| * ------------ | | convertToOp(trans), | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | convertToDiagType(diag), n, k, A, lda, x | |
| d | | , incx ); | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | cublasSetError(error); | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | } | |
| */ | | static __inline__ void CUBLASAPI cublasCtbmv (char uplo, char trans, char d | |
| void CUBLASAPI cublasSspr2 (char uplo, int n, float alpha, const float *x, | | iag, int n, int k, | |
| int incx, const float *y, int incy, float *AP); | | const cuComplex *A, int lda, cuComplex *x, int | |
| | | incx) | |
| /* | | { | |
| * void | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * cublasSsymv (char uplo, int n, float alpha, const float *A, int lda, | | cublasStatus_t error = cublasCtbmv_v2(handle, convertToFillMode(uplo), | |
| * const float *x, int incx, float beta, float *y, int incy) | | convertToOp(trans), | |
| * | | convertToDiagType(diag), n, k, A, lda, x | |
| * performs the matrix-vector operation | | , incx ); | |
| * | | cublasSetError(error); | |
| * y = alpha*A*x + beta*y | | } | |
| * | | static __inline__ void CUBLASAPI cublasZtbmv (char uplo, char trans, char d | |
| * Alpha and beta are single precision scalars, and x and y are single | | iag, int n, int k, | |
| * precision vectors, each with n elements. A is a symmetric n x n matrix | | const cuDoubleComplex *A, int lda, cuDoubleComp | |
| * consisting of single precision elements that is stored in either upper o | | lex *x, int incx) | |
| r | | { | |
| * lower storage mode. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZtbmv_v2(handle, convertToFillMode(uplo), | |
| * Input | | convertToOp(trans), | |
| * ----- | | convertToDiagType(diag), n, k, A, lda, x | |
| * uplo specifies whether the upper or lower triangular part of the array | | , incx ); | |
| A | | cublasSetError(error); | |
| * is to be referenced. If uplo == 'U' or 'u', the symmetric matrix | | } | |
| A | | /*------------------------------------------------------------------------* | |
| * is stored in upper storage mode, i.e. only the upper triangular p | | / | |
| art | | /* TPMV */ | |
| * of A is to be referenced while the lower triangular part of A is | | static __inline__ void CUBLASAPI cublasStpmv(char uplo, char trans, char di | |
| to | | ag, int n, const float *AP, float *x, int incx) | |
| * be inferred. If uplo == 'L' or 'l', the symmetric matrix A is sto | | { | |
| red | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * in lower storage mode, i.e. only the lower triangular part of A i | | cublasStatus_t error = cublasStpmv_v2(handle, convertToFillMode(uplo), | |
| s | | convertToOp(trans), | |
| * to be referenced while the upper triangular part of A is to be | | convertToDiagType(diag), n, AP, x, incx | |
| * inferred. | | ); | |
| * n specifies the number of rows and the number of columns of the | | cublasSetError(error); | |
| * symmetric matrix A. n must be at least zero. | | } | |
| * alpha single precision scalar multiplier applied to A*x. | | | |
| * A single precision array of dimensions (lda, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular part of the symmetric matrix and the strictl | | | |
| y | | | |
| * lower triangular part of A is not referenced. If uplo == 'L' or ' | | | |
| l', | | | |
| * the leading n x n lower triangular part of the array A must conta | | | |
| in | | | |
| * the lower triangular part of the symmetric matrix and the strictl | | | |
| y | | | |
| * upper triangular part of A is not referenced. | | | |
| * lda leading dimension of A. It must be at least max (1, n). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta single precision scalar multiplier applied to vector y. | | | |
| * y single precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssymv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSsymv (char uplo, int n, float alpha, const float *A, | | | |
| int lda, const float *x, int incx, float beta, | | | |
| float *y, int incy); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSsyr (char uplo, int n, float alpha, const float *x, int incx, | | | |
| * float *A, int lda) | | | |
| * | | | |
| * performs the symmetric rank 1 operation | | | |
| * | | | |
| * A = alpha * x * transpose(x) + A, | | | |
| * | | | |
| * where alpha is a single precision scalar, x is an n element single | | | |
| * precision vector and A is an n x n symmetric matrix consisting of | | | |
| * single precision elements. Matrix A is stored in column major format, | | | |
| * and lda is the leading dimension of the two-dimensional array | | | |
| * containing A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or | | | |
| * the lower triangular part of array A. If uplo = 'U' or 'u', | | | |
| * then only the upper triangular part of A may be referenced. | | | |
| * If uplo = 'L' or 'l', then only the lower triangular part of | | | |
| * A may be referenced. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * alpha single precision scalar multiplier applied to x * transpose(x) | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )) | | | |
| * incx specifies the storage spacing between elements of x. incx must | | | |
| * not be zero. | | | |
| * A single precision array of dimensions (lda, n). If uplo = 'U' or | | | |
| * 'u', then A must contain the upper triangular part of a symmetric | | | |
| * matrix, and the strictly lower triangular part is not referenced. | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part | | | |
| * of a symmetric matrix, and the strictly upper triangular part is | | | |
| * not referenced. | | | |
| * lda leading dimension of the two-dimensional array containing A. lda | | | |
| * must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * transpose(x) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssyr.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSsyr (char uplo, int n, float alpha, const float *x, | | | |
| int incx, float *A, int lda); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSsyr2 (char uplo, int n, float alpha, const float *x, int incx, | | | |
| * const float *y, int incy, float *A, int lda) | | | |
| * | | | |
| * performs the symmetric rank 2 operation | | | |
| * | | | |
| * A = alpha*x*transpose(y) + alpha*y*transpose(x) + A, | | | |
| * | | | |
| * where alpha is a single precision scalar, x and y are n element single | | | |
| * precision vector and A is an n by n symmetric matrix consisting of singl | | | |
| e | | | |
| * precision elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | | |
| * upper triangular part of A may be referenced and the lower triang | | | |
| ular | | | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | | |
| * triangular part of A may be referenced and the upper triangular p | | | |
| art | | | |
| * of A is inferred. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha single precision scalar multiplier applied to x * transpose(y) + | | | |
| * y * transpose(x). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs (inc | | | |
| x)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * y single precision array of length at least (1 + (n - 1) * abs (inc | | | |
| y)). | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * A single precision array of dimensions (lda, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * then A must contain the upper triangular part of a symmetric mat | | | |
| rix, | | | |
| * and the strictly lower triangular part is not referenced. If upl | | | |
| o == | | | |
| * 'L' or 'l', then A contains the lower triangular part of a symmet | | | |
| ric | | | |
| * matrix, and the strictly upper triangular part is not referenced. | | | |
| * lda leading dimension of A. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha*x*transpose(y)+alpha*y*transpose(x | | | |
| )+A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssyr2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSsyr2 (char uplo, int n, float alpha, const float *x, | | | |
| int incx, const float *y, int incy, float *A, | | | |
| int lda); | | | |
| /* | | | |
| * void | | | |
| * cublasStbmv (char uplo, char trans, char diag, int n, int k, const float | | | |
| *A, | | | |
| * int lda, float *x, int incx) | | | |
| * | | | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | | |
| = A | | | |
| * or op(A) = transpose(A). x is an n-element single precision vector, and | | | |
| A is | | | |
| * an n x n, unit or non-unit upper or lower triangular band matrix consist | | | |
| ing | | | |
| * of single precision elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular ba | | | |
| nd | | | |
| * matrix. If uplo == 'U' or 'u', A is an upper triangular band matr | | | |
| ix. | | | |
| * If uplo == 'L' or 'l', A is a lower triangular band matrix. | | | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | | |
| 'T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A). | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. In the current implementation n must not exceed 40 | | | |
| 70. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must be at least | | | |
| * zero. | | | |
| * A single precision array of dimension (lda, n). If uplo == 'U' or ' | | | |
| u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first | | | |
| * super-diagonal starting at position 2 in row k, and so on. The to | | | |
| p | | | |
| * left k x k triangle of the array A is not referenced. If uplo == | | | |
| 'L' | | | |
| * or 'l', the leading (k + 1) x n part of the array A must contain | | | |
| the | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The botto | | | |
| m | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * lda is the leading dimension of A. It must be at least (k + 1). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/stbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, k < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasStbmv (char uplo, char trans, char diag, int n, int k, | | | |
| const float *A, int lda, float *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasDtpmv(char uplo, char trans, char di | |
| * void cublasStbsv (char uplo, char trans, char diag, int n, int k, | | ag, int n, const double *AP, double *x, int incx) | |
| * const float *A, int lda, float *X, int incx) | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | cublasStatus_t error = cublasDtpmv_v2(handle, convertToFillMode(uplo), | |
| r | | convertToOp(trans), | |
| * op(A) = A or op(A) = transpose(A). b and x are n-element vectors, and A | | convertToDiagType(diag), n, AP, x, incx | |
| is | | ); | |
| * an n x n unit or non-unit, upper or lower triangular band matrix with k | | cublasSetError(error); | |
| + 1 | | | |
| * diagonals. No test for singularity or near-singularity is included in th | | | |
| is | | | |
| * function. Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix is an upper or lower triangular band | | | |
| * matrix as follows: If uplo == 'U' or 'u', A is an upper triangula | | | |
| r | | | |
| * band matrix. If uplo == 'L' or 'l', A is a lower triangular band | | | |
| * matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; that is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must be at least | | | |
| * zero. | | | |
| * A single precision array of dimension (lda, n). If uplo == 'U' or ' | | | |
| u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first sup | | | |
| er- | | | |
| * diagonal starting at position 2 in row k, and so on. The top left | | | |
| * k x k triangle of the array A is not referenced. If uplo == 'L' o | | | |
| r | | | |
| * 'l', the leading (k + 1) x n part of the array A must contain th | | | |
| e | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The bott | | | |
| om | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the n-element right-hand side vector b. On e | | | |
| xit, | | | |
| * it is overwritten with the solution vector x. | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/stbsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0, n < 0 or n > 4070 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasStbsv (char uplo, char trans, char diag, int n, int k, | | | |
| const float *A, int lda, float *x, int incx); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasStpmv (char uplo, char trans, char diag, int n, const float *AP, | | | |
| * float *x, int incx); | | | |
| * | | | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | | |
| = A, | | | |
| * or op(A) = transpose(A). x is an n element single precision vector, and | | | |
| A | | | |
| * is an n x n, unit or non-unit, upper or lower triangular matrix composed | | | |
| * of single precision elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo == 'U' or 'u', then A is an upper triangular matr | | | |
| ix. | | | |
| * If uplo == 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | | |
| 'T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * AP single precision array with at least ((n * (n + 1)) / 2) elements | | | |
| . If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/stpmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasStpmv (char uplo, char trans, char diag, int n, | | | |
| const float *AP, float *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCtpmv(char uplo, char trans, char di | |
| * void | | ag, int n, const cuComplex *AP, cuComplex *x, int incx) | |
| * cublasStpsv (char uplo, char trans, char diag, int n, const float *AP, | | { | |
| * float *X, int incx) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasCtpmv_v2(handle, convertToFillMode(uplo), | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | convertToOp(trans), | |
| r | | convertToDiagType(diag), n, AP, x, incx | |
| * op(A) = A or op(A) = transpose(A). b and x are n element vectors, and A | | ); | |
| is | | cublasSetError(error); | |
| * an n x n unit or non-unit, upper or lower triangular matrix. No test for | | | |
| * singularity or near-singularity is included in this function. Such tests | | | |
| * must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix is an upper or lower triangular matr | | | |
| ix | | | |
| * as follows: If uplo == 'U' or 'u', A is an upper triangular matri | | | |
| x. | | | |
| * If uplo == 'L' or 'l', A is a lower triangular matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; that is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. In the current implementation n must not exceed 40 | | | |
| 70. | | | |
| * AP single precision array with at least ((n*(n+1))/2) elements. If u | | | |
| plo | | | |
| * == 'U' or 'u', the array AP contains the upper triangular matrix | | | |
| A, | | | |
| * packed sequentially, column by column; that is, if i <= j, then | | | |
| * A[i,j] is stored in AP[i+(j*(j+1)/2)]. If uplo == 'L' or 'l', the | | | |
| * array AP contains the lower triangular matrix A, packed sequentia | | | |
| lly, | | | |
| * column by column; that is, if i >= j, then A[i,j] is stored in | | | |
| * AP[i+((2*n-j+1)*j)/2]. When diag = 'U' or 'u', the diagonal eleme | | | |
| nts | | | |
| * of A are not referenced and are assumed to be unity. | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the n-element right-hand side vector b. On e | | | |
| xit, | | | |
| * it is overwritten with the solution vector x. | | | |
| * incx storage spacing between elements of x. It must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/stpsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0, n < 0, or n > 4070 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasStpsv (char uplo, char trans, char diag, int n, | | | |
| const float *AP, float *x, int incx); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasStrmv (char uplo, char trans, char diag, int n, const float *A, | | | |
| * int lda, float *x, int incx); | | | |
| * | | | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | | |
| = | | | |
* A, or op(A) = transpose(A). x is an n-element single precision vector, a | | | |
| nd | | | |
| * A is an n x n, unit or non-unit, upper or lower, triangular matrix compo | | | |
| sed | | | |
| * of single precision elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', then A is an upper triangular matri | | | |
| x. | | | |
| * If uplo = 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If transa = 'N' or 'n', op(A) = A. If trans = 'T | | | |
| ', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag = ' | | | |
| U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or 'n', | | | |
| A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * A single precision array of dimension (lda, n). If uplo = 'U' or 'u | | | |
| ', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular matrix and the strictly lower triangular par | | | |
| t | | | |
| * of A is not referenced. If uplo = 'L' or 'l', the leading n x n l | | | |
| ower | | | |
| * triangular part of the array A must contain the lower triangular | | | |
| * matrix and the strictly upper triangular part of A is not referen | | | |
| ced. | | | |
| * When diag = 'U' or 'u', the diagonal elements of A are not refere | | | |
| nced | | | |
| * either, but are assumed to be unity. | | | |
| * lda is the leading dimension of A. It must be at least max (1, n). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| ) ). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/strmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasStrmv (char uplo, char trans, char diag, int n, | | | |
| const float *A, int lda, float *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZtpmv(char uplo, char trans, char di | |
| * void | | ag, int n, const cuDoubleComplex *AP, cuDoubleComplex *x, int incx) | |
| * cublasStrsv (char uplo, char trans, char diag, int n, const float *A, | | { | |
| * int lda, float *x, int incx) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZtpmv_v2(handle, convertToFillMode(uplo), | |
| * solves a system of equations op(A) * x = b, where op(A) is either A or | | convertToOp(trans), | |
| * transpose(A). b and x are single precision vectors consisting of n | | convertToDiagType(diag), n, AP, x, incx | |
| * elements, and A is an n x n matrix composed of a unit or non-unit, upper | | ); | |
| * or lower triangular matrix. Matrix A is stored in column major format, | | cublasSetError(error); | |
| * and lda is the leading dimension of the two-dimensional array containing | | | |
| * A. | | | |
| * | | | |
| * No test for singularity or near-singularity is included in this function | | | |
| . | | | |
| * Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the | | | |
| * lower triangular part of array A. If uplo = 'U' or 'u', then only | | | |
| * the upper triangular part of A may be referenced. If uplo = 'L' o | | | |
| r | | | |
| * 'l', then only the lower triangular part of A may be referenced. | | | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If transa = ' | | | |
| t', | | | |
| * 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * A is a single precision array of dimensions (lda, n). If uplo = 'U' | | | |
| * or 'u', then A must contain the upper triangular part of a symme | | | |
| tric | | | |
| * matrix, and the strictly lower triangular part is not referenced | | | |
| . | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part o | | | |
| f | | | |
| * a symmetric matrix, and the strictly upper triangular part is not | | | |
| * referenced. | | | |
| * lda is the leading dimension of the two-dimensional array containing | | | |
| A. | | | |
| * lda must be at least max(1, n). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the n element right-hand side vector b. On e | | | |
| xit, | | | |
| * it is overwritten with the solution vector x. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/strsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasStrsv (char uplo, char trans, char diag, int n, | | | |
| const float *A, int lda, float *x, int incx); | | | |
| | | | |
|
| /* ----------------- CUBLAS double complex BLAS2 functions ---------------- | | } | |
| - */ | | /*------------------------------------------------------------------------* | |
| | | / | |
| | | /* TRSV */ | |
| | | static __inline__ void CUBLASAPI cublasStrsv(char uplo, char trans, char di | |
| | | ag, int n, const float *A, int lda, float *x, int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasStrsv_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| | | convertToDiagType(diag), n, A, lda, x, i | |
| | | ncx ); | |
| | | cublasSetError(error); | |
| | | } | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasDtrsv(char uplo, char trans, char di | |
| * void | | ag, int n, const double *A, int lda, double *x, int incx) | |
| * cublasZtrmv (char uplo, char trans, char diag, int n, const cuDoubleComp | | { | |
| lex *A, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * int lda, cuDoubleComplex *x, int incx); | | cublasStatus_t error = cublasDtrsv_v2(handle, convertToFillMode(uplo), | |
| * | | convertToOp(trans), | |
| * performs one of the matrix-vector operations x = op(A) * x, | | convertToDiagType(diag), n, A, lda, x, i | |
| * where op(A) = A, or op(A) = transpose(A) or op(A) = conjugate(transpose( | | ncx ); | |
| A)). | | cublasSetError(error); | |
| * x is an n-element double precision complex vector, and | | } | |
| * A is an n x n, unit or non-unit, upper or lower, triangular matrix compo | | | |
| sed | | | |
| * of double precision complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', then A is an upper triangular matri | | | |
| x. | | | |
| * If uplo = 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If trans = 'n' or 'N', op(A) = A. If trans = 't' | | | |
| or | | | |
| * 'T', op(A) = transpose(A). If trans = 'c' or 'C', op(A) = | | | |
| * conjugate(transpose(A)). | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag = ' | | | |
| U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or 'n', | | | |
| A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * A double precision array of dimension (lda, n). If uplo = 'U' or 'u | | | |
| ', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular matrix and the strictly lower triangular par | | | |
| t | | | |
| * of A is not referenced. If uplo = 'L' or 'l', the leading n x n l | | | |
| ower | | | |
| * triangular part of the array A must contain the lower triangular | | | |
| * matrix and the strictly upper triangular part of A is not referen | | | |
| ced. | | | |
| * When diag = 'U' or 'u', the diagonal elements of A are not refere | | | |
| nced | | | |
| * either, but are are assumed to be unity. | | | |
| * lda is the leading dimension of A. It must be at least max (1, n). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| ) ). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztrmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtrmv (char uplo, char trans, char diag, int n, | | | |
| const cuDoubleComplex *A, int lda, cuDoubleComp | | | |
| lex *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCtrsv(char uplo, char trans, char di | |
| * void | | ag, int n, const cuComplex *A, int lda, cuComplex *x, int incx) | |
| * cublasZgbmv (char trans, int m, int n, int kl, int ku, cuDoubleComplex a | | { | |
| lpha, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * const cuDoubleComplex *A, int lda, const cuDoubleComplex *x | | cublasStatus_t error = cublasCtrsv_v2(handle, convertToFillMode(uplo), | |
| , int incx, cuDoubleComplex beta, | | convertToOp(trans), | |
| * cuDoubleComplex *y, int incy); | | convertToDiagType(diag), n, A, lda, x, i | |
| * | | ncx ); | |
| * performs one of the matrix-vector operations | | cublasSetError(error); | |
| * | | } | |
| * y = alpha*op(A)*x + beta*y, op(A)=A or op(A) = transpose(A) | | | |
| * | | | |
| * alpha and beta are double precision complex scalars. x and y are double | | | |
| precision | | | |
| * complex vectors. A is an m by n band matrix consisting of double precisi | | | |
| on complex elements | | | |
| * with kl sub-diagonals and ku super-diagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', | | | |
| * op(A) = conjugate(transpose(A)). | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * kl specifies the number of sub-diagonals of matrix A. It must be at | | | |
| * least zero. | | | |
| * ku specifies the number of super-diagonals of matrix A. It must be a | | | |
| t | | | |
| * least zero. | | | |
| * alpha double precision complex scalar multiplier applied to op(A). | | | |
| * A double precision complex array of dimensions (lda, n). The leadin | | | |
| g | | | |
| * (kl + ku + 1) x n part of the array A must contain the band matri | | | |
| x A, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x | | | |
| * in row (ku + 1) of the array, the first super-diagonal starting a | | | |
| t | | | |
| * position 2 in row ku, the first sub-diagonal starting at position | | | |
| 1 | | | |
| * in row (ku + 2), and so on. Elements in the array A that do not | | | |
| * correspond to elements in the band matrix (such as the top left | | | |
| * ku x ku triangle) are not referenced. | | | |
| * lda leading dimension of A. lda must be at least (kl + ku + 1). | | | |
| * x double precision complex array of length at least (1+(n-1)*abs(in | | | |
| cx)) when | | | |
| * trans == 'N' or 'n' and at least (1+(m-1)*abs(incx)) otherwise. | | | |
| * incx specifies the increment for the elements of x. incx must not be z | | | |
| ero. | | | |
| * beta double precision complex scalar multiplier applied to vector y. I | | | |
| f beta is | | | |
| * zero, y is not read. | | | |
| * y double precision complex array of length at least (1+(m-1)*abs(in | | | |
| cy)) when | | | |
| * trans == 'N' or 'n' and at least (1+(n-1)*abs(incy)) otherwise. I | | | |
| f | | | |
| * beta is zero, y is not read. | | | |
| * incy On entry, incy specifies the increment for the elements of y. inc | | | |
| y | | | |
| * must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*op(A)*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zgbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZgbmv (char trans, int m, int n, int kl, int ku, | | | |
| cuDoubleComplex alpha, const cuDoubleComplex *A | | | |
| , int lda, | | | |
| const cuDoubleComplex *x, int incx, cuDoubleCom | | | |
| plex beta, | | | |
| cuDoubleComplex *y, int incy); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZtrsv(char uplo, char trans, char di | |
| * void | | ag, int n, const cuDoubleComplex *A, int lda, | |
| * cublasZtbmv (char uplo, char trans, char diag, int n, int k, const cuDou | | cuDoubleComplex *x, int incx) | |
| bleComplex *A, | | { | |
| * int lda, cuDoubleComplex *x, int incx) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZtrsv_v2(handle, convertToFillMode(uplo), | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | convertToOp(trans), | |
| = A, | | convertToDiagType(diag), n, A, lda, x, i | |
| * op(A) = transpose(A) or op(A) = conjugate(transpose(A)). x is an n-eleme | | ncx ); | |
| nt | | cublasSetError(error); | |
| * double precision complex vector, and A is an n x n, unit or non-unit, up | | } | |
| per | | /*------------------------------------------------------------------------* | |
| * or lower triangular band matrix composed of double precision complex ele | | / | |
| ments. | | /* TPSV */ | |
| * | | static __inline__ void CUBLASAPI cublasStpsv(char uplo, char trans, char di | |
| * Input | | ag, int n, const float *AP, | |
| * ----- | | float *x, int incx) | |
| * uplo specifies whether the matrix A is an upper or lower triangular ba | | { | |
| nd | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * matrix. If uplo == 'U' or 'u', A is an upper triangular band matr | | cublasStatus_t error = cublasStpsv_v2(handle, convertToFillMode(uplo), | |
| ix. | | convertToOp(trans), | |
| * If uplo == 'L' or 'l', A is a lower triangular band matrix. | | convertToDiagType(diag), n, AP, x, incx | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | ); | |
| 'T', | | cublasSetError(error); | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', | | } | |
| * op(A) = conjugate(transpose(A)). | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must at least be | | | |
| * zero. | | | |
| * A double precision complex array of dimension (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first | | | |
| * super-diagonal starting at position 2 in row k, and so on. The to | | | |
| p | | | |
| * left k x k triangle of the array A is not referenced. If uplo == | | | |
| 'L' | | | |
| * or 'l', the leading (k + 1) x n part of the array A must constain | | | |
| the | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal startingat position 1 in row 2, and so on. The botto | | | |
| m | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * lda is the leading dimension of A. It must be at least (k + 1). | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n or k < 0, or if incx == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtbmv (char uplo, char trans, char diag, int n, | | | |
| int k, const cuDoubleComplex *A, int lda, cuDoub | | | |
| leComplex *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasDtpsv(char uplo, char trans, char di | |
| * void cublasZtbsv (char uplo, char trans, char diag, int n, int k, | | ag, int n, const double *AP, double *x, int incx) | |
| * const cuDoubleComplex *A, int lda, cuDoubleComplex *X, | | { | |
| int incx) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasDtpsv_v2(handle, convertToFillMode(uplo), | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | convertToOp(trans), | |
| r | | convertToDiagType(diag), n, AP, x, incx | |
| * op(A) = A , op(A) = transpose(A) or op(A) = conjugate(transpose(A)). | | ); | |
| * b and x are n element vectors, and A is an n x n unit or non-unit, | | cublasSetError(error); | |
| * upper or lower triangular band matrix with k + 1 diagonals. No test | | } | |
| * for singularity or near-singularity is included in this function. | | | |
| * Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix is an upper or lower triangular band | | | |
| * matrix as follows: If uplo == 'U' or 'u', A is an upper triangula | | | |
| r | | | |
| * band matrix. If uplo == 'L' or 'l', A is a lower triangular band | | | |
| * matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', op(A) = transpose(A). If trans == 'C' or 'c', | | | |
| * op(A) = conjugate(transpose(A)). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; thas is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must at least be | | | |
| * zero. | | | |
| * A double precision complex array of dimension (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first sup | | | |
| er- | | | |
| * diagonal starting at position 2 in row k, and so on. The top left | | | |
| * k x k triangle of the array A is not referenced. If uplo == 'L' o | | | |
| r | | | |
| * 'l', the leading (k + 1) x n part of the array A must constain th | | | |
| e | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The bott | | | |
| om | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * x double precision complex array of length at least (1+(n-1)*abs(in | | | |
| cx)). | | | |
| * incx storage spacing between elements of x. It must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztbsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0, n < 0 or n > 1016 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtbsv (char uplo, char trans, char diag, int n, | | | |
| int k, const cuDoubleComplex *A, int lda, cuDou | | | |
| bleComplex *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCtpsv(char uplo, char trans, char di | |
| * void | | ag, int n, const cuComplex *AP, cuComplex *x, int incx) | |
| * cublasZhemv (char uplo, int n, cuDoubleComplex alpha, const cuDoubleComp | | { | |
| lex *A, int lda, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * const cuDoubleComplex *x, int incx, cuDoubleComplex beta, c | | cublasStatus_t error = cublasCtpsv_v2(handle, convertToFillMode(uplo), | |
| uDoubleComplex *y, int incy) | | convertToOp(trans), | |
| * | | convertToDiagType(diag), n, AP, x, incx | |
| * performs the matrix-vector operation | | ); | |
| * | | cublasSetError(error); | |
| * y = alpha*A*x + beta*y | | } | |
| * | | | |
| * Alpha and beta are double precision complex scalars, and x and y are dou | | | |
| ble | | | |
| * precision complex vectors, each with n elements. A is a hermitian n x n | | | |
| matrix | | | |
| * consisting of double precision complex elements that is stored in either | | | |
| upper or | | | |
| * lower storage mode. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the array | | | |
| A | | | |
| * is to be referenced. If uplo == 'U' or 'u', the hermitian matrix | | | |
| A | | | |
| * is stored in upper storage mode, i.e. only the upper triangular p | | | |
| art | | | |
| * of A is to be referenced while the lower triangular part of A is | | | |
| to | | | |
| * be inferred. If uplo == 'L' or 'l', the hermitian matrix A is sto | | | |
| red | | | |
| * in lower storage mode, i.e. only the lower triangular part of A i | | | |
| s | | | |
| * to be referenced while the upper triangular part of A is to be | | | |
| * inferred. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * hermitian matrix A. n must be at least zero. | | | |
| * alpha double precision complex scalar multiplier applied to A*x. | | | |
| * A double precision complex array of dimensions (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular part of the hermitian matrix and the strictl | | | |
| y | | | |
| * lower triangular part of A is not referenced. If uplo == 'L' or ' | | | |
| l', | | | |
| * the leading n x n lower triangular part of the array A must conta | | | |
| in | | | |
| * the lower triangular part of the hermitian matrix and the strictl | | | |
| y | | | |
| * upper triangular part of A is not referenced. The imaginary parts | | | |
| * of the diagonal elements need not be set, they are assumed to be | | | |
| zero. | | | |
| * lda leading dimension of A. It must be at least max (1, n). | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta double precision complex scalar multiplier applied to vector y. | | | |
| * y double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zhemv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZhemv (char uplo, int n, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *A, int lda, const cuDoub | | | |
| leComplex *x, | | | |
| int incx, cuDoubleComplex beta, cuDoubleComplex | | | |
| *y, int incy); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZtpsv(char uplo, char trans, char di | |
| * void | | ag, int n, const cuDoubleComplex *AP, | |
| * cublasZhpmv (char uplo, int n, cuDoubleComplex alpha, const cuDoubleComp | | cuDoubleComplex *x, int incx) | |
| lex *AP, const cuDoubleComplex *x, | | { | |
| * int incx, cuDoubleComplex beta, cuDoubleComplex *y, int inc | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| y) | | cublasStatus_t error = cublasZtpsv_v2(handle, convertToFillMode(uplo), | |
| * | | convertToOp(trans), | |
| * performs the matrix-vector operation | | convertToDiagType(diag), n, AP, x, incx | |
| * | | ); | |
| * y = alpha * A * x + beta * y | | cublasSetError(error); | |
| * | | } | |
| * Alpha and beta are double precision complex scalars, and x and y are dou | | /*------------------------------------------------------------------------* | |
| ble | | / | |
| * precision complex vectors with n elements. A is an hermitian n x n matri | | /* TBSV */ | |
| x | | static __inline__ void CUBLASAPI cublasStbsv(char uplo, char trans, | |
| * consisting of double precision complex elements that is supplied in pack | | char diag, int n, int k, const flo | |
| ed form. | | at *A, | |
| * | | int lda, float *x, int incx) | |
| * Input | | { | |
| * ----- | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | cublasStatus_t error = cublasStbsv_v2(handle, convertToFillMode(uplo), | |
| ower | | convertToOp(trans), | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | convertToDiagType(diag), n, k, A, lda, x | |
| r | | , incx ); | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | cublasSetError(error); | |
| en | | } | |
| * the lower triangular part of A is supplied in AP. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision complex scalar multiplier applied to A*x. | | | |
| * AP double precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * The imaginary parts of the diagonal elements need not be set, the | | | |
| y | | | |
| * are assumed to be zero. | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta double precision complex scalar multiplier applied to vector y; | | | |
| * y double precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zhpmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZhpmv (char uplo, int n, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *AP, const cuDoubleComple | | | |
| x *x, | | | |
| int incx, cuDoubleComplex beta, cuDoubleComplex | | | |
| *y, int incy); | | | |
| | | | |
|
| /* ----------------- CUBLAS double complex BLAS3 functions ---------------- | | static __inline__ void CUBLASAPI cublasDtbsv(char uplo, char trans, | |
| - */ | | char diag, int n, int k, const dou | |
| | | ble *A, | |
| | | int lda, double *x, int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDtbsv_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| | | convertToDiagType(diag), n, k, A, lda, x | |
| | | , incx ); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasCtbsv(char uplo, char trans, | |
| | | char diag, int n, int k, const cuC | |
| | | omplex *A, | |
| | | int lda, cuComplex *x, int incx) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCtbsv_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| | | convertToDiagType(diag), n, k, A, lda, x | |
| | | , incx ); | |
| | | cublasSetError(error); | |
| | | } | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZtbsv(char uplo, char trans, | |
| * cublasZgemv (char trans, int m, int n, cuDoubleComplex alpha, const cuDo | | char diag, int n, int k, const cuD | |
| ubleComplex *A, int lda, | | oubleComplex *A, | |
| * const cuDoubleComplex *x, int incx, cuDoubleComplex beta, c | | int lda, cuDoubleComplex *x, int i | |
| uDoubleComplex *y, int incy) | | ncx) | |
| * | | { | |
| * performs one of the matrix-vector operations | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZtbsv_v2(handle, convertToFillMode(uplo), | |
| * y = alpha * op(A) * x + beta * y, | | convertToOp(trans), | |
| * | | convertToDiagType(diag), n, k, A, lda, x | |
| * where op(A) is one of | | , incx ); | |
| * | | cublasSetError(error); | |
| * op(A) = A or op(A) = transpose(A) | | } | |
| * | | /*------------------------------------------------------------------------* | |
| * where alpha and beta are double precision scalars, x and y are double | | / | |
| * precision vectors, and A is an m x n matrix consisting of double precisi | | /* SYMV/HEMV */ | |
| on | | static __inline__ void CUBLASAPI cublasSsymv (char uplo, int n, float alpha | |
| * elements. Matrix A is stored in column major format, and lda is the lead | | , const float *A, | |
| ing | | int lda, const float *x, int incx, float beta, | |
| * dimension of the two-dimensional array in which A is stored. | | float *y, int incy) | |
| * | | { | |
| * Input | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * ----- | | cublasStatus_t error = cublasSsymv_v2(handle, convertToFillMode(uplo), | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If trans = | | n, &alpha, A, lda, x, incx, &beta, y, in | |
| * trans = 't', 'T', 'c', or 'C', op(A) = transpose(A) | | cy ); | |
| * m specifies the number of rows of the matrix A. m must be at least | | cublasSetError(error); | |
| * zero. | | } | |
| * n specifies the number of columns of the matrix A. n must be at lea | | static __inline__ void CUBLASAPI cublasDsymv (char uplo, int n, double alph | |
| st | | a, const double *A, | |
| * zero. | | int lda, const double *x, int incx, double beta | |
| * alpha double precision scalar multiplier applied to op(A). | | , | |
| * A double precision array of dimensions (lda, n) if trans = 'n' or | | double *y, int incy) | |
| * 'N'), and of dimensions (lda, m) otherwise. lda must be at least | | { | |
| * max(1, m) and at least max(1, n) otherwise. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * lda leading dimension of two-dimensional array used to store matrix A | | cublasStatus_t error = cublasDsymv_v2(handle, convertToFillMode(uplo), | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | n, &alpha, A, lda, x, incx, &beta, y, in | |
| )) | | cy ); | |
| * when trans = 'N' or 'n' and at least (1 + (m - 1) * abs(incx)) | | cublasSetError(error); | |
| * otherwise. | | } | |
| * incx specifies the storage spacing between elements of x. incx must no | | static __inline__ void CUBLASAPI cublasChemv (char uplo, int n, cuComplex a | |
| t | | lpha, const cuComplex *A, | |
| * be zero. | | int lda, const cuComplex *x, int incx, cuComple | |
| * beta double precision scalar multiplier applied to vector y. If beta | | x beta, | |
| * is zero, y is not read. | | cuComplex *y, int incy) | |
| * y double precision array of length at least (1 + (m - 1) * abs(incy | | { | |
| )) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * when trans = 'N' or 'n' and at least (1 + (n - 1) * abs(incy)) | | cublasStatus_t error = cublasChemv_v2(handle, convertToFillMode(uplo), | |
| * otherwise. | | n, &alpha, A, lda, x, incx, &beta, y, in | |
| * incy specifies the storage spacing between elements of x. incx must no | | cy ); | |
| t | | cublasSetError(error); | |
| * be zero. | | } | |
| * | | static __inline__ void CUBLASAPI cublasZhemv (char uplo, int n, cuDoubleCom | |
| * Output | | plex alpha, const cuDoubleComplex *A, | |
| * ------ | | int lda, const cuDoubleComplex *x, int incx, cu | |
| * y updated according to alpha * op(A) * x + beta * y | | DoubleComplex beta, | |
| * | | cuDoubleComplex *y, int incy) | |
| * Reference: http://www.netlib.org/blas/zgemv.f | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Error status for this function can be retrieved via cublasGetError(). | | cublasStatus_t error = cublasZhemv_v2(handle, convertToFillMode(uplo), | |
| * | | n, &alpha, A, lda, x, incx, &beta, y, in | |
| * Error Status | | cy ); | |
| * ------------ | | cublasSetError(error); | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | } | |
| d | | /*------------------------------------------------------------------------* | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0, or if incx or incy == | | / | |
| 0 | | /* SBMV/HBMV */ | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | static __inline__ void CUBLASAPI cublasSsbmv (char uplo, int n, int k, floa | |
| */ | | t alpha, | |
| void CUBLASAPI cublasZgemv (char trans, int m, int n, cuDoubleComplex alpha | | const float *A, int lda, const float *x, int in | |
| , | | cx, | |
| | | float beta, float *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSsbmv_v2(handle, convertToFillMode(uplo),n | |
| | | ,k, &alpha, | |
| | | A, lda, x, incx, &beta, y, incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDsbmv (char uplo, int n, int k, doub | |
| | | le alpha, | |
| | | const double *A, int lda, const double *x, int | |
| | | incx, | |
| | | double beta, double *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDsbmv_v2(handle, convertToFillMode(uplo),n | |
| | | ,k, &alpha, | |
| | | A, lda, x, incx, &beta, y, incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasChbmv (char uplo, int n, int k, cuCo | |
| | | mplex alpha, | |
| | | const cuComplex *A, int lda, const cuComplex *x | |
| | | , int incx, | |
| | | cuComplex beta, cuComplex *y, int incy) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasChbmv_v2(handle, convertToFillMode(uplo),n | |
| | | ,k, &alpha, | |
| | | A, lda, x, incx, &beta, y, incy); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZhbmv (char uplo, int n, int k, cuDo | |
| | | ubleComplex alpha, | |
| const cuDoubleComplex *A, int lda, const cuDoub
leComplex *x, int incx, | | const cuDoubleComplex *A, int lda, const cuDoub
leComplex *x, int incx, | |
|
| cuDoubleComplex beta, cuDoubleComplex *y, int i | | cuDoubleComplex beta, cuDoubleComplex *y, int i | |
| ncy); | | ncy) | |
| | | { | |
| /* | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * void | | cublasStatus_t error = cublasZhbmv_v2(handle, convertToFillMode(uplo),n | |
| * cublasZtpmv (char uplo, char trans, char diag, int n, const cuDoubleComp | | ,k, &alpha, | |
| lex *AP, | | A, lda, x, incx, &beta, y, incy); | |
| * cuDoubleComplex *x, int incx); | | cublasSetError(error); | |
| * | | } | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | /*------------------------------------------------------------------------* | |
| = A, | | / | |
| * op(A) = transpose(A) or op(A) = conjugate(transpose(A)) . x is an n elem | | /* SPMV/HPMV */ | |
| ent | | static __inline__ void CUBLASAPI cublasSspmv(char uplo, int n, float alpha, | |
| * double precision complex vector, and A is an n x n, unit or non-unit, up | | const float *AP, const float *x, | |
| per | | int incx, float beta, float *y, int in | |
| * or lower triangular matrix composed of double precision complex elements | | cy) | |
| . | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Input | | cublasStatus_t error = cublasSspmv_v2(handle, convertToFillMode(uplo),n | |
| * ----- | | , &alpha, | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | AP, x, incx, &beta, y, incy); | |
| * matrix. If uplo == 'U' or 'u', then A is an upper triangular matr | | cublasSetError(error); | |
| ix. | | } | |
| * If uplo == 'L' or 'l', then A is a lower triangular matrix. | | static __inline__ void CUBLASAPI cublasDspmv(char uplo, int n, double alpha | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | , | |
| 'T', | | const double *AP, const double *x, | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', | | int incx, double beta, double *y, int | |
| * op(A) = conjugate(transpose(A)). | | incy) | |
| * | | { | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| 'U' | | cublasStatus_t error = cublasDspmv_v2(handle, convertToFillMode(uplo),n | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | , &alpha, | |
| , A | | AP, x, incx, &beta, y, incy); | |
| * is not assumed to be unit triangular. | | cublasSetError(error); | |
| * n specifies the number of rows and columns of the matrix A. n must | | } | |
| be | | static __inline__ void CUBLASAPI cublasChpmv(char uplo, int n, cuComplex al | |
| * at least zero. In the current implementation n must not exceed 40 | | pha, | |
| 70. | | const cuComplex *AP, const cuComplex * | |
| * AP double precision complex array with at least ((n * (n + 1)) / 2) | | x, | |
| elements. If | | int incx, cuComplex beta, cuComplex *y | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | , int incy) | |
| rt | | { | |
| * of the symmetric matrix A, packed sequentially, column by column; | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | cublasStatus_t error = cublasChpmv_v2(handle, convertToFillMode(uplo),n | |
| f | | , &alpha, | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | AP, x, incx, &beta, y, incy); | |
| rt | | cublasSetError(error); | |
| * of the symmetric matrix A, packed sequentially, column by column; | | } | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | static __inline__ void CUBLASAPI cublasZhpmv(char uplo, int n, cuDoubleComp | |
| ]. | | lex alpha, | |
| * x double precision complex array of length at least (1 + (n - 1) * | | const cuDoubleComplex *AP, const cuDou | |
| abs(incx)). | | bleComplex *x, | |
| * On entry, x contains the source vector. On exit, x is overwritten | | int incx, cuDoubleComplex beta, cuDoub | |
| * with the result vector. | | leComplex *y, int incy) | |
| * incx specifies the storage spacing for elements of x. incx must not be | | { | |
| * zero. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZhpmv_v2(handle, convertToFillMode(uplo),n | |
| * Output | | , &alpha, | |
| * ------ | | AP, x, incx, &beta, y, incy); | |
| * x updated according to x = op(A) * x, | | cublasSetError(error); | |
| * | | } | |
| * Reference: http://www.netlib.org/blas/ztpmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or n < 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtpmv (char uplo, char trans, char diag, int n, | | | |
| const cuDoubleComplex *AP, cuDoubleComplex *x, | | | |
| int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZtpsv (char uplo, char trans, char diag, int n, const cuDoubleComp | | | |
| lex *AP, | | | |
| * cuDoubleComplex *X, int incx) | | | |
| * | | | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | | |
| r | | | |
| * op(A) = A , op(A) = transpose(A) or op(A) = conjugate(transpose)). b and | | | |
| * x are n element complex vectors, and A is an n x n unit or non-unit, | | | |
| * upper or lower triangular matrix. No test for singularity or near-singul | | | |
| arity | | | |
| * is included in this routine. Such tests must be performed before calling | | | |
| this routine. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix is an upper or lower triangular matr | | | |
| ix | | | |
| * as follows: If uplo == 'U' or 'u', A is an upper triangluar matri | | | |
| x. | | | |
| * If uplo == 'L' or 'l', A is a lower triangular matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T' | | | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', op(A) = | | | |
| * conjugate(transpose(A)). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; thas is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * AP double precision complex array with at least ((n*(n+1))/2) elemen | | | |
| ts. | | | |
| * If uplo == 'U' or 'u', the array AP contains the upper triangular | | | |
| * matrix A, packed sequentially, column by column; that is, if i <= | | | |
| j, then | | | |
| * A[i,j] is stored is AP[i+(j*(j+1)/2)]. If uplo == 'L' or 'L', the | | | |
| * array AP contains the lower triangular matrix A, packed sequentia | | | |
| lly, | | | |
| * column by column; that is, if i >= j, then A[i,j] is stored in | | | |
| * AP[i+((2*n-j+1)*j)/2]. When diag = 'U' or 'u', the diagonal eleme | | | |
| nts | | | |
| * of A are not referenced and are assumed to be unity. | | | |
| * x double precision complex array of length at least (1+(n-1)*abs(in | | | |
| cx)). | | | |
| * incx storage spacing between elements of x. It must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztpsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 or n > 2035 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtpsv (char uplo, char trans, char diag, int n, | | | |
| const cuDoubleComplex *AP, cuDoubleComplex *x, | | | |
| int incx); | | | |
| | | | |
| /* ----------------- CUBLAS single complex BLAS2 functions ---------------- | | | |
| - */ | | | |
| /* | | | |
| * cublasCgemv (char trans, int m, int n, cuComplex alpha, const cuComplex | | | |
| *A, | | | |
| * int lda, const cuComplex *x, int incx, cuComplex beta, cuCo | | | |
| mplex *y, | | | |
| * int incy) | | | |
| * | | | |
| * performs one of the matrix-vector operations | | | |
| * | | | |
| * y = alpha * op(A) * x + beta * y, | | | |
| * | | | |
| * where op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) or op(A) = conjugate(transpose( | | | |
| A)) | | | |
| * | | | |
| * where alpha and beta are single precision scalars, x and y are single | | | |
| * precision vectors, and A is an m x n matrix consisting of single precisi | | | |
| on | | | |
| * elements. Matrix A is stored in column major format, and lda is the lead | | | |
| ing | | | |
| * dimension of the two-dimensional array in which A is stored. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If trans = | | | |
| * trans = 't' or 'T', op(A) = transpose(A). If trans = 'c' or 'C', | | | |
| * op(A) = conjugate(transpose(A)) | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * alpha single precision scalar multiplier applied to op(A). | | | |
| * A single precision array of dimensions (lda, n) if trans = 'n' or | | | |
| * 'N'), and of dimensions (lda, m) otherwise. lda must be at least | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )) | | | |
| * when trans = 'N' or 'n' and at least (1 + (m - 1) * abs(incx)) | | | |
| * otherwise. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * beta single precision scalar multiplier applied to vector y. If beta | | | |
| * is zero, y is not read. | | | |
| * y single precision array of length at least (1 + (m - 1) * abs(incy | | | |
| )) | | | |
| * when trans = 'N' or 'n' and at least (1 + (n - 1) * abs(incy)) | | | |
| * otherwise. | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha * op(A) * x + beta * y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cgemv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0, or if incx or incy == | | | |
| 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCgemv (char trans, int m, int n, cuComplex alpha, | | | |
| const cuComplex *A, int lda, const cuComplex *x | | | |
| , | | | |
| int incx, cuComplex beta, cuComplex *y, int inc | | | |
| y); | | | |
| /* | | | |
| * void | | | |
| * cublasCgbmv (char trans, int m, int n, int kl, int ku, cuComplex alpha, | | | |
| * const cuComplex *A, int lda, const cuComplex *x, int incx, | | | |
| cuComplex beta, | | | |
| * cuComplex *y, int incy); | | | |
| * | | | |
| * performs one of the matrix-vector operations | | | |
| * | | | |
| * y = alpha*op(A)*x + beta*y, op(A)=A or op(A) = transpose(A) | | | |
| * | | | |
| * alpha and beta are single precision complex scalars. x and y are single | | | |
| precision | | | |
| * complex vectors. A is an m by n band matrix consisting of single precisi | | | |
| on complex elements | | | |
| * with kl sub-diagonals and ku super-diagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', | | | |
| * op(A) = conjugate(transpose(A)). | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * kl specifies the number of sub-diagonals of matrix A. It must be at | | | |
| * least zero. | | | |
| * ku specifies the number of super-diagonals of matrix A. It must be a | | | |
| t | | | |
| * least zero. | | | |
| * alpha single precision complex scalar multiplier applied to op(A). | | | |
| * A single precision complex array of dimensions (lda, n). The leadin | | | |
| g | | | |
| * (kl + ku + 1) x n part of the array A must contain the band matri | | | |
| x A, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x | | | |
| * in row (ku + 1) of the array, the first super-diagonal starting a | | | |
| t | | | |
| * position 2 in row ku, the first sub-diagonal starting at position | | | |
| 1 | | | |
| * in row (ku + 2), and so on. Elements in the array A that do not | | | |
| * correspond to elements in the band matrix (such as the top left | | | |
| * ku x ku triangle) are not referenced. | | | |
| * lda leading dimension of A. lda must be at least (kl + ku + 1). | | | |
| * x single precision complex array of length at least (1+(n-1)*abs(in | | | |
| cx)) when | | | |
| * trans == 'N' or 'n' and at least (1+(m-1)*abs(incx)) otherwise. | | | |
| * incx specifies the increment for the elements of x. incx must not be z | | | |
| ero. | | | |
| * beta single precision complex scalar multiplier applied to vector y. I | | | |
| f beta is | | | |
| * zero, y is not read. | | | |
| * y single precision complex array of length at least (1+(m-1)*abs(in | | | |
| cy)) when | | | |
| * trans == 'N' or 'n' and at least (1+(n-1)*abs(incy)) otherwise. I | | | |
| f | | | |
| * beta is zero, y is not read. | | | |
| * incy On entry, incy specifies the increment for the elements of y. inc | | | |
| y | | | |
| * must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*op(A)*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cgbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCgbmv (char trans, int m, int n, int kl, int ku, | | | |
| cuComplex alpha, const cuComplex *A, int lda, | | | |
| const cuComplex *x, int incx, cuComplex beta, | | | |
| cuComplex *y, int incy); | | | |
| /* | | | |
| * void | | | |
| * cublasChemv (char uplo, int n, cuComplex alpha, const cuComplex *A, int | | | |
| lda, | | | |
| * const cuComplex *x, int incx, cuComplex beta, cuComplex *y, | | | |
| int incy) | | | |
| * | | | |
| * performs the matrix-vector operation | | | |
| * | | | |
| * y = alpha*A*x + beta*y | | | |
| * | | | |
| * Alpha and beta are single precision complex scalars, and x and y are sin | | | |
| gle | | | |
| * precision complex vectors, each with n elements. A is a hermitian n x n | | | |
| matrix | | | |
| * consisting of single precision complex elements that is stored in either | | | |
| upper or | | | |
| * lower storage mode. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the array | | | |
| A | | | |
| * is to be referenced. If uplo == 'U' or 'u', the hermitian matrix | | | |
| A | | | |
| * is stored in upper storage mode, i.e. only the upper triangular p | | | |
| art | | | |
| * of A is to be referenced while the lower triangular part of A is | | | |
| to | | | |
| * be inferred. If uplo == 'L' or 'l', the hermitian matrix A is sto | | | |
| red | | | |
| * in lower storage mode, i.e. only the lower triangular part of A i | | | |
| s | | | |
| * to be referenced while the upper triangular part of A is to be | | | |
| * inferred. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * hermitian matrix A. n must be at least zero. | | | |
| * alpha single precision complex scalar multiplier applied to A*x. | | | |
| * A single precision complex array of dimensions (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular part of the hermitian matrix and the strictl | | | |
| y | | | |
| * lower triangular part of A is not referenced. If uplo == 'L' or ' | | | |
| l', | | | |
| * the leading n x n lower triangular part of the array A must conta | | | |
| in | | | |
| * the lower triangular part of the hermitian matrix and the strictl | | | |
| y | | | |
| * upper triangular part of A is not referenced. The imaginary parts | | | |
| * of the diagonal elements need not be set, they are assumed to be | | | |
| zero. | | | |
| * lda leading dimension of A. It must be at least max (1, n). | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta single precision complex scalar multiplier applied to vector y. | | | |
| * y single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/chemv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| | | | |
| void CUBLASAPI cublasChemv (char uplo, int n, cuComplex alpha, | | | |
| const cuComplex *A, int lda, const cuComplex *x | | | |
| , | | | |
| int incx, cuComplex beta, cuComplex *y, int inc | | | |
| y); | | | |
| /* | | | |
| * void | | | |
| * cublasChbmv (char uplo, int n, int k, cuComplex alpha, const cuComplex * | | | |
| A, int lda, | | | |
| * const cuComplex *x, int incx, cuComplex beta, cuComplex *y, | | | |
| int incy) | | | |
| * | | | |
| * performs the matrix-vector operation | | | |
| * | | | |
| * y := alpha*A*x + beta*y | | | |
| * | | | |
| * alpha and beta are single precision complex scalars. x and y are single | | | |
| precision | | | |
| * complex vectors with n elements. A is an n by n hermitian band matrix co | | | |
| nsisting | | | |
| * of single precision complex elements, with k super-diagonals and the sam | | | |
| e number | | | |
| * of subdiagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the hermi | | | |
| tian | | | |
| * band matrix A is being supplied. If uplo == 'U' or 'u', the upper | | | |
| * triangular part is being supplied. If uplo == 'L' or 'l', the low | | | |
| er | | | |
| * triangular part is being supplied. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * hermitian matrix A. n must be at least zero. | | | |
| * k specifies the number of super-diagonals of matrix A. Since the ma | | | |
| trix | | | |
| * is hermitian, this is also the number of sub-diagonals. k must be | | | |
| at | | | |
| * least zero. | | | |
| * alpha single precision complex scalar multiplier applied to A*x. | | | |
| * A single precision complex array of dimensions (lda, n). When uplo | | | |
| == 'U' or | | | |
| * 'u', the leading (k + 1) x n part of array A must contain the upp | | | |
| er | | | |
| * triangular band of the hermitian matrix, supplied column by colum | | | |
| n, | | | |
| * with the leading diagonal of the matrix in row (k+1) of the array | | | |
| , | | | |
| * the first super-diagonal starting at position 2 in row k, and so | | | |
| on. | | | |
| * The top left k x k triangle of the array A is not referenced. Whe | | | |
| n | | | |
| * uplo == 'L' or 'l', the leading (k + 1) x n part of the array A m | | | |
| ust | | | |
| * contain the lower triangular band part of the hermitian matrix, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x in | | | |
| * row 1 of the array, the first sub-diagonal starting at position 1 | | | |
| in | | | |
| * row 2, and so on. The bottom right k x k triangle of the array A | | | |
| is | | | |
| * not referenced. The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero. | | | |
| * lda leading dimension of A. lda must be at least (k + 1). | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta single precision complex scalar multiplier applied to vector y. I | | | |
| f beta is | | | |
| * zero, y is not read. | | | |
| * y single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/chbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if k or n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasChbmv (char uplo, int n, int k, cuComplex alpha, | | | |
| const cuComplex *A, int lda, const cuComplex *x | | | |
| , | | | |
| int incx, cuComplex beta, cuComplex *y, int inc | | | |
| y); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasChpmv (char uplo, int n, cuComplex alpha, const cuComplex *AP, con | | | |
| st cuComplex *x, | | | |
| * int incx, cuComplex beta, cuComplex *y, int incy) | | | |
| * | | | |
| * performs the matrix-vector operation | | | |
| * | | | |
| * y = alpha * A * x + beta * y | | | |
| * | | | |
| * Alpha and beta are single precision complex scalars, and x and y are sin | | | |
| gle | | | |
| * precision complex vectors with n elements. A is an hermitian n x n matri | | | |
| x | | | |
| * consisting of single precision complex elements that is supplied in pack | | | |
| ed form. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | | |
| r | | | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | | |
| en | | | |
| * the lower triangular part of A is supplied in AP. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha single precision complex scalar multiplier applied to A*x. | | | |
| * AP single precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * The imaginary parts of the diagonal elements need not be set, the | | | |
| y | | | |
| * are assumed to be zero. | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta single precision complex scalar multiplier applied to vector y; | | | |
| * y single precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/chpmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasChpmv (char uplo, int n, cuComplex alpha, | | | |
| const cuComplex *AP, const cuComplex *x, int in | | | |
| cx, | | | |
| cuComplex beta, cuComplex *y, int incy); | | | |
| | | | |
| /* | | | |
| * | | | |
| * cublasCtrmv (char uplo, char trans, char diag, int n, const cuComplex *A | | | |
| , | | | |
| * int lda, cuComplex *x, int incx); | | | |
| * | | | |
| * performs one of the matrix-vector operations x = op(A) * x, | | | |
| * where op(A) = A, or op(A) = transpose(A) or op(A) = conjugate(transpose( | | | |
| A)). | | | |
| * x is an n-element signle precision complex vector, and | | | |
| * A is an n x n, unit or non-unit, upper or lower, triangular matrix compo | | | |
| sed | | | |
| * of single precision complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', then A is an upper triangular matri | | | |
| x. | | | |
| * If uplo = 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If trans = 'n' or 'N', op(A) = A. If trans = 't' | | | |
| or | | | |
| * 'T', op(A) = transpose(A). If trans = 'c' or 'C', op(A) = | | | |
| * conjugate(transpose(A)). | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag = ' | | | |
| U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or 'n', | | | |
| A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * A single precision array of dimension (lda, n). If uplo = 'U' or 'u | | | |
| ', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular matrix and the strictly lower triangular par | | | |
| t | | | |
| * of A is not referenced. If uplo = 'L' or 'l', the leading n x n l | | | |
| ower | | | |
| * triangular part of the array A must contain the lower triangular | | | |
| * matrix and the strictly upper triangular part of A is not referen | | | |
| ced. | | | |
| * When diag = 'U' or 'u', the diagonal elements of A are not refere | | | |
| nced | | | |
| * either, but are are assumed to be unity. | | | |
| * lda is the leading dimension of A. It must be at least max (1, n). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| ) ). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctrmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtrmv (char uplo, char trans, char diag, int n, | | | |
| const cuComplex *A, int lda, cuComplex *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | /*------------------------------------------------------------------------* | |
| * void | | / | |
| * cublasCtbmv (char uplo, char trans, char diag, int n, int k, const cuCom | | /* GER */ | |
| plex *A, | | static __inline__ void CUBLASAPI cublasSger (int m, int n, float alpha, con | |
| * int lda, cuComplex *x, int incx) | | st float *x, int incx, | |
| * | | const float *y, int incy, float *A, int lda) | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | { | |
| = A, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * op(A) = transpose(A) or op(A) = conjugate(transpose(A)). x is an n-eleme | | cublasStatus_t error = cublasSger_v2(handle, m, n, &alpha, | |
| nt | | x, incx, y, incy, A, lda); | |
| * single precision complex vector, and A is an n x n, unit or non-unit, up | | cublasSetError(error); | |
| per | | } | |
| * or lower triangular band matrix composed of single precision complex ele | | static __inline__ void CUBLASAPI cublasDger (int m, int n, double alpha, co | |
| ments. | | nst double *x, int incx, | |
| * | | const double *y, int incy, double *A, int lda) | |
| * Input | | { | |
| * ----- | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * uplo specifies whether the matrix A is an upper or lower triangular ba | | cublasStatus_t error = cublasDger_v2(handle, m, n, &alpha, | |
| nd | | x, incx, y, incy, A, lda); | |
| * matrix. If uplo == 'U' or 'u', A is an upper triangular band matr | | cublasSetError(error); | |
| ix. | | } | |
| * If uplo == 'L' or 'l', A is a lower triangular band matrix. | | | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | | |
| 'T', | | | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', | | | |
| * op(A) = conjugate(transpose(A)). | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must at least be | | | |
| * zero. | | | |
| * A single precision complex array of dimension (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first | | | |
| * super-diagonal starting at position 2 in row k, and so on. The to | | | |
| p | | | |
| * left k x k triangle of the array A is not referenced. If uplo == | | | |
| 'L' | | | |
| * or 'l', the leading (k + 1) x n part of the array A must contain | | | |
| the | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The botto | | | |
| m | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * lda is the leading dimension of A. It must be at least (k + 1). | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n or k < 0, or if incx == 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtbmv (char uplo, char trans, char diag, int n, int k, | | | |
| const cuComplex *A, int lda, cuComplex *x, | | | |
| int incx); | | | |
| /* | | | |
| * void | | | |
| * cublasCtpmv (char uplo, char trans, char diag, int n, const cuComplex *A | | | |
| P, | | | |
| * cuComplex *x, int incx); | | | |
| * | | | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | | |
| = A, | | | |
| * op(A) = transpose(A) or op(A) = conjugate(transpose(A)) . x is an n elem | | | |
| ent | | | |
| * single precision complex vector, and A is an n x n, unit or non-unit, up | | | |
| per | | | |
| * or lower triangular matrix composed of single precision complex elements | | | |
| . | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo == 'U' or 'u', then A is an upper triangular matr | | | |
| ix. | | | |
| * If uplo == 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | | |
| 'T', | | | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', | | | |
| * op(A) = conjugate(transpose(A)). | | | |
| * | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. In the current implementation n must not exceed 40 | | | |
| 70. | | | |
| * AP single precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctpmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or n < 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtpmv (char uplo, char trans, char diag, int n, | | | |
| const cuComplex *AP, cuComplex *x, int incx); | | | |
| /* | | | |
| * void | | | |
| * cublasCtrsv (char uplo, char trans, char diag, int n, const cuComplex *A | | | |
| , | | | |
| * int lda, cuComplex *x, int incx) | | | |
| * | | | |
| * solves a system of equations op(A) * x = b, where op(A) is either A, | | | |
| * transpose(A) or conjugate(transpose(A)). b and x are single precision | | | |
| * complex vectors consisting of n elements, and A is an n x n matrix | | | |
| * composed of a unit or non-unit, upper or lower triangular matrix. | | | |
| * Matrix A is stored in column major format, and lda is the leading | | | |
| * dimension of the two-dimensional array containing A. | | | |
| * | | | |
| * No test for singularity or near-singularity is included in this function | | | |
| . | | | |
| * Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the | | | |
| * lower triangular part of array A. If uplo = 'U' or 'u', then only | | | |
| * the upper triangular part of A may be referenced. If uplo = 'L' o | | | |
| r | | | |
| * 'l', then only the lower triangular part of A may be referenced. | | | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If transa = ' | | | |
| t', | | | |
| * 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * A is a single precision complex array of dimensions (lda, n). If up | | | |
| lo = 'U' | | | |
| * or 'u', then A must contain the upper triangular part of a symme | | | |
| tric | | | |
| * matrix, and the strictly lower triangular parts is not referenced | | | |
| . | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part o | | | |
| f | | | |
| * a symmetric matrix, and the strictly upper triangular part is not | | | |
| * referenced. | | | |
| * lda is the leading dimension of the two-dimensional array containing | | | |
| A. | | | |
| * lda must be at least max(1, n). | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * On entry, x contains the n element right-hand side vector b. On e | | | |
| xit, | | | |
| * it is overwritten with the solution vector x. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctrsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtrsv (char uplo, char trans, char diag, int n, | | | |
| const cuComplex *A, int lda, cuComplex *x, | | | |
| int incx); | | | |
| /* | | | |
| * void cublasCtbsv (char uplo, char trans, char diag, int n, int k, | | | |
| * const cuComplex *A, int lda, cuComplex *X, int incx) | | | |
| * | | | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | | |
| r | | | |
| * op(A) = A , op(A) = transpose(A) or op(A) = conjugate(transpose(A)). | | | |
| * b and x are n element vectors, and A is an n x n unit or non-unit, | | | |
| * upper or lower triangular band matrix with k + 1 diagonals. No test | | | |
| * for singularity or near-singularity is included in this function. | | | |
| * Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix is an upper or lower triangular band | | | |
| * matrix as follows: If uplo == 'U' or 'u', A is an upper triangula | | | |
| r | | | |
| * band matrix. If uplo == 'L' or 'l', A is a lower triangular band | | | |
| * matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', op(A) = transpose(A). If trans == 'C' or 'c', | | | |
| * op(A) = conjugate(transpose(A)). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; that is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must at least be | | | |
| * zero. | | | |
| * A single precision complex array of dimension (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first sup | | | |
| er- | | | |
| * diagonal starting at position 2 in row k, and so on. The top left | | | |
| * k x k triangle of the array A is not referenced. If uplo == 'L' o | | | |
| r | | | |
| * 'l', the leading (k + 1) x n part of the array A must contain th | | | |
| e | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The bott | | | |
| om | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * x single precision complex array of length at least (1+(n-1)*abs(in | | | |
| cx)). | | | |
| * incx storage spacing between elements of x. It must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctbsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0, n < 0 or n > 2035 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtbsv (char uplo, char trans, char diag, int n, int k, | | | |
| const cuComplex *A, int lda, cuComplex *x, | | | |
| int incx); | | | |
| /* | | | |
| * void | | | |
| * cublasCtpsv (char uplo, char trans, char diag, int n, const cuComplex *A | | | |
| P, | | | |
| * cuComplex *X, int incx) | | | |
| * | | | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | | |
| r | | | |
| * op(A) = A , op(A) = transpose(A) or op(A) = conjugate(transpose(A)). b and | | | |
| * x are n element complex vectors, and A is an n x n unit or non-unit, | | | |
| * upper or lower triangular matrix. No test for singularity or near-singul | | | |
| arity | | | |
| * is included in this routine. Such tests must be performed before calling | | | |
| this routine. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix is an upper or lower triangular matr | | | |
| ix | | | |
| * as follows: If uplo == 'U' or 'u', A is an upper triangular matri | | | |
| x. | | | |
| * If uplo == 'L' or 'l', A is a lower triangular matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T' | | | |
| * or 't', op(A) = transpose(A). If trans == 'C' or 'c', op(A) = | | | |
| * conjugate(transpose(A)). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; that is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * AP single precision complex array with at least ((n*(n+1))/2) elemen | | | |
| ts. | | | |
| * If uplo == 'U' or 'u', the array AP contains the upper triangular | | | |
| * matrix A, packed sequentially, column by column; that is, if i <= | | | |
| j, then | | | |
| * A[i,j] is stored in AP[i+(j*(j+1)/2)]. If uplo == 'L' or 'l', the | | | |
| * array AP contains the lower triangular matrix A, packed sequentia | | | |
| lly, | | | |
| * column by column; that is, if i >= j, then A[i,j] is stored in | | | |
| * AP[i+((2*n-j+1)*j)/2]. When diag = 'U' or 'u', the diagonal eleme | | | |
| nts | | | |
| * of A are not referenced and are assumed to be unity. | | | |
| * x single precision complex array of length at least (1+(n-1)*abs(in | | | |
| cx)). | | | |
| * incx storage spacing between elements of x. It must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctpsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 or n > 2035 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtpsv (char uplo, char trans, char diag, int n, | | | |
| const cuComplex *AP, cuComplex *x, int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCgeru (int m, int n, cuComplex alpha | |
| * cublasCgeru (int m, int n, cuComplex alpha, const cuComplex *x, int incx | | , const cuComplex *x, | |
| , | | | |
| * const cuComplex *y, int incy, cuComplex *A, int lda) | | | |
| * | | | |
| * performs the rank 1 operation | | | |
| * | | | |
| * A = alpha * x * transpose(y) + A, | | | |
| * | | | |
| * where alpha is a single precision complex scalar, x is an m element sing | | | |
| le | | | |
| * precision complex vector, y is an n element single precision complex vec | | | |
| tor, and A | | | |
| * is an m by n matrix consisting of single precision complex elements. Mat | | | |
| rix A | | | |
| * is stored in column major format, and lda is the leading dimension of | | | |
| * the two-dimensional array used to store A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * m specifies the number of rows of the matrix A. It must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. It must be at | | | |
| * least zero. | | | |
| * alpha single precision complex scalar multiplier applied to x * transpo | | | |
| se(y) | | | |
| * x single precision complex array of length at least (1 + (m - 1) * | | | |
| abs(incx)) | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * y single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)) | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * A single precision complex array of dimensions (lda, n). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * transpose(y) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cgeru.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m < 0, n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCgeru (int m, int n, cuComplex alpha, const cuComplex | | | |
| *x, | | | |
| int incx, const cuComplex *y, int incy, | | int incx, const cuComplex *y, int incy, | |
|
| cuComplex *A, int lda); | | cuComplex *A, int lda) | |
| /* | | { | |
| * cublasCgerc (int m, int n, cuComplex alpha, const cuComplex *x, int incx | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| , | | cublasStatus_t error = cublasCgeru_v2(handle, m, n, &alpha, | |
| * const cuComplex *y, int incy, cuComplex *A, int lda) | | x, incx, y, incy, A, lda); | |
| * | | cublasSetError(error); | |
| * performs the rank 1 operation | | } | |
| * | | static __inline__ void CUBLASAPI cublasCgerc (int m, int n, cuComplex alpha | |
| * A = alpha * x * conjugate(transpose(y)) + A, | | , const cuComplex *x, | |
| * | | | |
| * where alpha is a single precision complex scalar, x is an m element sing | | | |
| le | | | |
| * precision complex vector, y is an n element single precision complex vec | | | |
| tor, and A | | | |
| * is an m by n matrix consisting of single precision complex elements. Mat | | | |
| rix A | | | |
| * is stored in column major format, and lda is the leading dimension of | | | |
| * the two-dimensional array used to store A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * m specifies the number of rows of the matrix A. It must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. It must be at | | | |
| * least zero. | | | |
| * alpha single precision complex scalar multiplier applied to | | | |
| x * conjugate(transpose(y)) | | | |
| * x single precision complex array of length at least (1 + (m - 1) * | | | |
| abs(incx)) | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * y single precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)) | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * A single precision complex array of dimensions (lda, n). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * conjugate(transpose(y)) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cgerc.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m < 0, n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCgerc (int m, int n, cuComplex alpha, const cuComplex | | | |
| *x, | | | |
| int incx, const cuComplex *y, int incy, | | int incx, const cuComplex *y, int incy, | |
|
| cuComplex *A, int lda); | | cuComplex *A, int lda) | |
| /* | | { | |
| * void | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * cublasCher (char uplo, int n, float alpha, const cuComplex *x, int incx, | | cublasStatus_t error = cublasCgerc_v2(handle, m, n, &alpha, | |
| * cuComplex *A, int lda) | | x, incx, y, incy, A, lda); | |
| * | | cublasSetError(error); | |
| * performs the hermitian rank 1 operation | | } | |
| * | | static __inline__ void CUBLASAPI cublasZgeru (int m, int n, cuDoubleComplex | |
| * A = alpha * x * conjugate(transpose(x)) + A, | | alpha, const cuDoubleComplex *x, | |
| * | | int incx, const cuDoubleComplex *y, int incy, | |
| * where alpha is a single precision real scalar, x is an n element single | | cuDoubleComplex *A, int lda) | |
| * precision complex vector and A is an n x n hermitian matrix consisting o | | { | |
| f | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * single precision complex elements. Matrix A is stored in column major fo | | cublasStatus_t error = cublasZgeru_v2(handle, m, n, &alpha, | |
| rmat, | | x, incx, y, incy, A, lda); | |
| * and lda is the leading dimension of the two-dimensional array | | cublasSetError(error); | |
| * containing A. | | } | |
| * | | static __inline__ void CUBLASAPI cublasZgerc (int m, int n, cuDoubleComplex | |
| * Input | | alpha, const cuDoubleComplex *x, | |
| * ----- | | int incx, const cuDoubleComplex *y, int incy, | |
| * uplo specifies whether the matrix data is stored in the upper or | | cuDoubleComplex *A, int lda) | |
| * the lower triangular part of array A. If uplo = 'U' or 'u', | | { | |
| * then only the upper triangular part of A may be referenced. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * If uplo = 'L' or 'l', then only the lower triangular part of | | cublasStatus_t error = cublasZgerc_v2(handle, m, n, &alpha, | |
| * A may be referenced. | | x, incx, y, incy, A, lda); | |
| * n specifies the number of rows and columns of the matrix A. It | | cublasSetError(error); | |
| * must be at least 0. | | } | |
| * alpha single precision real scalar multiplier applied to | | /*------------------------------------------------------------------------* | |
| * x * conjugate(transpose(x)) | | / | |
| * x single precision complex array of length at least (1 + (n - 1) * | | /* SYR/HER */ | |
| abs(incx)) | | static __inline__ void CUBLASAPI cublasSsyr (char uplo, int n, float alpha, | |
| * incx specifies the storage spacing between elements of x. incx must | | const float *x, | |
| * not be zero. | | int incx, float *A, int lda) | |
| * A single precision complex array of dimensions (lda, n). If uplo = | | { | |
| 'U' or | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * 'u', then A must contain the upper triangular part of a hermitian | | cublasStatus_t error = cublasSsyr_v2(handle, convertToFillMode(uplo), n | |
| * matrix, and the strictly lower triangular part is not referenced. | | , &alpha, | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part | | x, incx, A, lda); | |
| * of a hermitian matrix, and the strictly upper triangular part is | | cublasSetError(error); | |
| * not referenced. The imaginary parts of the diagonal elements need | | } | |
| * not be set, they are assumed to be zero, and on exit they | | static __inline__ void CUBLASAPI cublasDsyr (char uplo, int n, double alpha | |
| * are set to zero. | | , const double *x, | |
| * lda leading dimension of the two-dimensional array containing A. lda | | int incx, double *A, int lda) | |
| * must be at least max(1, n). | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Output | | cublasStatus_t error = cublasDsyr_v2(handle, convertToFillMode(uplo), n | |
| * ------ | | , &alpha, | |
| * A updated according to A = alpha * x * conjugate(transpose(x)) + A | | x, incx, A, lda); | |
| * | | cublasSetError(error); | |
| * Reference: http://www.netlib.org/blas/cher.f | | } | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCher (char uplo, int n, float alpha, | | | |
| const cuComplex *x, int incx, cuComplex *A, | | | |
| int lda); | | | |
| /* | | | |
| * void | | | |
| * cublasChpr (char uplo, int n, float alpha, const cuComplex *x, int incx, | | | |
| * cuComplex *AP) | | | |
| * | | | |
| * performs the hermitian rank 1 operation | | | |
| * | | | |
| * A = alpha * x * conjugate(transpose(x)) + A, | | | |
| * | | | |
| * where alpha is a single precision real scalar and x is an n element sing | | | |
| le | | | |
| * precision complex vector. A is a hermitian n x n matrix consisting of si | | | |
| ngle | | | |
| * precision complex elements that is supplied in packed form. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | | |
| r | | | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | | |
| en | | | |
| * the lower triangular part of A is supplied in AP. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha single precision real scalar multiplier applied to x * conjugate( | | | |
| transpose(x)). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * AP single precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * The imaginary parts of the diagonal elements need not be set, the | | | |
| y | | | |
| * are assumed to be zero, and on exit they are set to zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * conjugate(transpose(x)) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/chpr.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasChpr (char uplo, int n, float alpha, | | | |
| const cuComplex *x, int incx, cuComplex *AP); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCher (char uplo, int n, float alpha, | |
| * void | | const cuComplex *x, int incx, cuComplex *A, int | |
| * cublasChpr2 (char uplo, int n, cuComplex alpha, const cuComplex *x, int | | lda) | |
| incx, | | { | |
| * const cuComplex *y, int incy, cuComplex *AP) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasCher_v2(handle, convertToFillMode(uplo), n | |
| * performs the hermitian rank 2 operation | | , &alpha, | |
| * | | x, incx, A, lda); | |
| * A = alpha*x*conjugate(transpose(y)) + conjugate(alpha)*y*conjugate(tr | | cublasSetError(error); | |
| anspose(x)) + A, | | } | |
| * | | static __inline__ void CUBLASAPI cublasZher (char uplo, int n, double alpha | |
| * where alpha is a single precision complex scalar, and x and y are n elem | | , | |
| ent single | | const cuDoubleComplex *x, int incx, cuDoubleComp | |
| * precision complex vectors. A is a hermitian n x n matrix consisting of s | | lex *A, int lda) | |
| ingle | | { | |
| * precision complex elements that is supplied in packed form. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZher_v2(handle, convertToFillMode(uplo), n | |
| * Input | | , &alpha, | |
| * ----- | | x, incx, A, lda); | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | cublasSetError(error); | |
| ower | | } | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | | |
| * upper triangular part of A may be referenced and the lower triang | | | |
| ular | | | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | | |
| * triangular part of A may be referenced and the upper triangular p | | | |
| art | | | |
| * of A is inferred. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha single precision complex scalar multiplier applied to x * conjuga | | | |
| te(transpose(y)) + | | | |
| * y * conjugate(transpose(x)). | | | |
| * x single precision complex array of length at least (1 + (n - 1) * | | | |
| abs (incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * y single precision complex array of length at least (1 + (n - 1) * | | | |
| abs (incy)). | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * AP single precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * The imaginary parts of the diagonal elements need not be set, the | | | |
| y | | | |
| * are assumed to be zero, and on exit they are set to zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha*x*conjugate(transpose(y)) | | | |
| * + conjugate(alpha)*y*conjugate(transpose(x | | | |
| ))+A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/chpr2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasChpr2 (char uplo, int n, cuComplex alpha, | | | |
| const cuComplex *x, int incx, const cuComplex * | | | |
| y, | | | |
| int incy, cuComplex *AP); | | | |
| | | | |
|
| /* | | /*------------------------------------------------------------------------* | |
| * void cublasCher2 (char uplo, int n, cuComplex alpha, const cuComplex *x, | | / | |
| int incx, | | /* SPR/HPR */ | |
| * const cuComplex *y, int incy, cuComplex *A, int lda) | | static __inline__ void CUBLASAPI cublasSspr (char uplo, int n, float alpha, | |
| * | | const float *x, | |
| * performs the hermitian rank 2 operation | | int incx, float *AP) | |
| * | | { | |
| * A = alpha*x*conjugate(transpose(y)) + conjugate(alpha)*y*conjugate(tr | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| anspose(x)) + A, | | cublasStatus_t error = cublasSspr_v2(handle, convertToFillMode(uplo), n | |
| * | | , &alpha, | |
| * where alpha is a single precision complex scalar, x and y are n element | | x, incx, AP); | |
| single | | cublasSetError(error); | |
| * precision complex vector and A is an n by n hermitian matrix consisting | | } | |
| of single | | static __inline__ void CUBLASAPI cublasDspr (char uplo, int n, double alpha | |
| * precision complex elements. | | , const double *x, | |
| * | | int incx, double *AP) | |
| * Input | | { | |
| * ----- | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | cublasStatus_t error = cublasDspr_v2(handle, convertToFillMode(uplo), n | |
| ower | | , &alpha, | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | x, incx, AP); | |
| * upper triangular part of A may be referenced and the lower triang | | cublasSetError(error); | |
| ular | | } | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | static __inline__ void CUBLASAPI cublasChpr (char uplo, int n, float alpha, | |
| * triangular part of A may be referenced and the upper triangular p | | const cuComplex *x, | |
| art | | int incx, cuComplex *AP) | |
| * of A is inferred. | | { | |
| * n specifies the number of rows and columns of the matrix A. It must | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| be | | cublasStatus_t error = cublasChpr_v2(handle, convertToFillMode(uplo), n | |
| * at least zero. | | , &alpha, | |
| * alpha single precision complex scalar multiplier applied to x * conjuga | | x, incx, AP); | |
| te(transpose(y)) + | | cublasSetError(error); | |
| * y * conjugate(transpose(x)). | | } | |
| * x single precision array of length at least (1 + (n - 1) * abs (inc | | static __inline__ void CUBLASAPI cublasZhpr (char uplo, int n, double alpha | |
| x)). | | , const cuDoubleComplex *x, | |
| * incx storage spacing between elements of x. incx must not be zero. | | int incx, cuDoubleComplex *AP) | |
| * y single precision array of length at least (1 + (n - 1) * abs (inc | | { | |
| y)). | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * incy storage spacing between elements of y. incy must not be zero. | | cublasStatus_t error = cublasZhpr_v2(handle, convertToFillMode(uplo), n | |
| * A single precision complex array of dimensions (lda, n). If uplo == | | , &alpha, | |
| 'U' or 'u', | | x, incx, AP); | |
| * then A must contain the upper triangular part of a hermitian mat | | | |
| rix, | | } | |
| * and the strictly lower triangular parts is not referenced. If upl | | /*------------------------------------------------------------------------* | |
| o == | | / | |
| * 'L' or 'l', then A contains the lower triangular part of a hermit | | /* SYR2/HER2 */ | |
| ian | | static __inline__ void CUBLASAPI cublasSsyr2 (char uplo, int n, float alpha | |
| * matrix, and the strictly upper triangular part is not referenced. | | , const float *x, | |
| * The imaginary parts of the diagonal elements need not be set, | | int incx, const float *y, int incy, float *A, | |
| * they are assumed to be zero, and on exit they are set to zero. | | int lda) | |
| * | | { | |
| * lda leading dimension of A. It must be at least max(1, n). | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasSsyr2_v2(handle, convertToFillMode(uplo), | |
| * Output | | n, &alpha, | |
| * ------ | | x, incx, y, incy, A, lda); | |
| * A updated according to A = alpha*x*conjugate(transpose(y)) | | cublasSetError(error); | |
| * + conjugate(alpha)*y*conjugate(transpose(x | | } | |
| ))+A | | static __inline__ void CUBLASAPI cublasDsyr2 (char uplo, int n, double alph | |
| * | | a, const double *x, | |
| * Reference: http://www.netlib.org/blas/cher2.f | | int incx, const double *y, int incy, double *A, | |
| * | | int lda) | |
| * Error status for this function can be retrieved via cublasGetError(). | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Error Status | | cublasStatus_t error = cublasDsyr2_v2(handle, convertToFillMode(uplo), | |
| * ------------ | | n, &alpha, | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | x, incx, y, incy, A, lda); | |
| d | | cublasSetError(error); | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | } | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | static __inline__ void CUBLASAPI cublasCher2 (char uplo, int n, cuComplex a | |
| */ | | lpha, const cuComplex *x, | |
| void CUBLASAPI cublasCher2 (char uplo, int n, cuComplex alpha, | | int incx, const cuComplex *y, int incy, cuCompl | |
| const cuComplex *x, int incx, const cuComplex * | | ex *A, | |
| y, | | int lda) | |
| int incy, cuComplex *A, int lda); | | { | |
| void CUBLASAPI cublasChpr2 (char uplo, int n, cuComplex alpha, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| const cuComplex *x, int incx, const cuComplex * | | cublasStatus_t error = cublasCher2_v2(handle, convertToFillMode(uplo), | |
| y, | | n, &alpha, | |
| int incy, cuComplex *AP); | | x, incx, y, incy, A, lda); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZher2 (char uplo, int n, cuDoubleCom | |
| | | plex alpha, const cuDoubleComplex *x, | |
| | | int incx, const cuDoubleComplex *y, int incy, c | |
| | | uDoubleComplex *A, | |
| | | int lda) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZher2_v2(handle, convertToFillMode(uplo), | |
| | | n, &alpha, | |
| | | x, incx, y, incy, A, lda); | |
| | | cublasSetError(error); | |
| | | } | |
| | | | |
|
| /* ---------------- CUBLAS single precision BLAS3 functions --------------- | | /*------------------------------------------------------------------------* | |
| - */ | | / | |
| | | /* SPR2/HPR2 */ | |
| | | static __inline__ void CUBLASAPI cublasSspr2 (char uplo, int n, float alpha | |
| | | , const float *x, | |
| | | int incx, const float *y, int incy, float *AP) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasSspr2_v2(handle, convertToFillMode(uplo), | |
| | | n, &alpha, | |
| | | x, incx, y, incy, AP); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasDspr2 (char uplo, int n, double alph | |
| | | a, | |
| | | const double *x, int incx, const double *y, | |
| | | int incy, double *AP) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasDspr2_v2(handle, convertToFillMode(uplo), | |
| | | n, &alpha, | |
| | | x, incx, y, incy, AP); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasChpr2 (char uplo, int n, cuComplex a | |
| | | lpha, | |
| | | const cuComplex *x, int incx, const cuComplex * | |
| | | y, | |
| | | int incy, cuComplex *AP) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasChpr2_v2(handle, convertToFillMode(uplo), | |
| | | n, &alpha, | |
| | | x, incx, y, incy, AP); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZhpr2 (char uplo, int n, cuDoubleCom | |
| | | plex alpha, | |
| | | const cuDoubleComplex *x, int incx, const cuDou | |
| | | bleComplex *y, | |
| | | int incy, cuDoubleComplex *AP) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZhpr2_v2(handle, convertToFillMode(uplo), | |
| | | n, &alpha, | |
| | | x, incx, y, incy, AP); | |
| | | cublasSetError(error); | |
| | | | |
|
| /* | | } | |
| * void | | /* ------------------------BLAS3 Functions ------------------------------- | |
| * cublasSgemm (char transa, char transb, int m, int n, int k, float alpha, | | */ | |
| * const float *A, int lda, const float *B, int ldb, float bet | | /* GEMM */ | |
| a, | | static __inline__ void CUBLASAPI cublasSgemm (char transa, char transb, int | |
| * float *C, int ldc) | | m, int n, int k, | |
| * | | | |
| * computes the product of matrix A and matrix B, multiplies the result | | | |
| * by a scalar alpha, and adds the sum to the product of matrix C and | | | |
| * scalar beta. sgemm() performs one of the matrix-matrix operations: | | | |
| * | | | |
| * C = alpha * op(A) * op(B) + beta * C, | | | |
| * | | | |
| * where op(X) is one of | | | |
| * | | | |
| * op(X) = X or op(X) = transpose(X) | | | |
| * | | | |
| * alpha and beta are single precision scalars, and A, B and C are | | | |
| * matrices consisting of single precision elements, with op(A) an m x k | | | |
| * matrix, op(B) a k x n matrix, and C an m x n matrix. Matrices A, B, | | | |
| * and C are stored in column major format, and lda, ldb, and ldc are | | | |
| * the leading dimensions of the two-dimensional arrays containing A, | | | |
| * B, and C. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * transa specifies op(A). If transa = 'n' or 'N', op(A) = A. If | | | |
| * transa = 't', 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * transb specifies op(B). If transb = 'n' or 'N', op(B) = B. If | | | |
| * transb = 't', 'T', 'c', or 'C', op(B) = transpose(B) | | | |
| * m number of rows of matrix op(A) and rows of matrix C | | | |
| * n number of columns of matrix op(B) and number of columns of C | | | |
| * k number of columns of matrix op(A) and number of rows of op(B) | | | |
| * alpha single precision scalar multiplier applied to op(A)op(B) | | | |
| * A single precision array of dimensions (lda, k) if transa = | | | |
| * 'n' or 'N'), and of dimensions (lda, m) otherwise. When transa = | | | |
| * 'N' or 'n' then lda must be at least max( 1, m ), otherwise lda | | | |
| * must be at least max(1, k). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * B single precision array of dimensions (ldb, n) if transb = | | | |
| * 'n' or 'N'), and of dimensions (ldb, k) otherwise. When transb = | | | |
| * 'N' or 'n' then ldb must be at least max (1, k), otherwise ldb | | | |
| * must be at least max (1, n). | | | |
| * ldb leading dimension of two-dimensional array used to store matrix B | | | |
| * beta single precision scalar multiplier applied to C. If 0, C does | | | |
| * not have to be a valid input | | | |
| * C single precision array of dimensions (ldc, n). ldc must be at | | | |
| * least max (1, m). | | | |
| * ldc leading dimension of two-dimensional array used to store matrix C | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated based on C = alpha * op(A)*op(B) + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sgemm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if any of m, n, or k are < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSgemm (char transa, char transb, int m, int n, int k, | | | |
| float alpha, const float *A, int lda, | | float alpha, const float *A, int lda, | |
| const float *B, int ldb, float beta, float *C, | | const float *B, int ldb, float beta, float *C, | |
|
| int ldc); | | int ldc) | |
| | | { | |
| /* | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * void | | | |
| * cublasSsymm (char side, char uplo, int m, int n, float alpha, | | | |
| * const float *A, int lda, const float *B, int ldb, | | | |
| * float beta, float *C, int ldc); | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * C = alpha * A * B + beta * C, or | | | |
| * C = alpha * B * A + beta * C, | | | |
| * | | | |
| * where alpha and beta are single precision scalars, A is a symmetric matr | | | |
| ix | | | |
| * consisting of single precision elements and stored in either lower or up | | | |
| per | | | |
| * storage mode, and B and C are m x n matrices consisting of single precis | | | |
| ion | | | |
| * elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether the symmetric matrix A appears on the left side | | | |
| * hand side or right hand side of matrix B, as follows. If side == | | | |
| 'L' | | | |
| * or 'l', then C = alpha * A * B + beta * C. If side = 'R' or 'r', | | | |
| * then C = alpha * B * A + beta * C. | | | |
| * uplo specifies whether the symmetric matrix A is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be inferred | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * inferred from those in the lower triangular part. | | | |
| * m specifies the number of rows of the matrix C, and the number of r | | | |
| ows | | | |
| * of matrix B. It also specifies the dimensions of symmetric matrix | | | |
| A | | | |
| * when side == 'L' or 'l'. m must be at least zero. | | | |
| * n specifies the number of columns of the matrix C, and the number o | | | |
| f | | | |
| * columns of matrix B. It also specifies the dimensions of symmetri | | | |
| c | | | |
| * matrix A when side == 'R' or 'r'. n must be at least zero. | | | |
| * alpha single precision scalar multiplier applied to A * B, or B * A | | | |
| * A single precision array of dimensions (lda, ka), where ka is m whe | | | |
| n | | | |
| * side == 'L' or 'l' and is n otherwise. If side == 'L' or 'l' the | | | |
| * leading m x m part of array A must contain the symmetric matrix, | | | |
| * such that when uplo == 'U' or 'u', the leading m x m part stores | | | |
| the | | | |
| * upper triangular part of the symmetric matrix, and the strictly l | | | |
| ower | | | |
| * triangular part of A is not referenced, and when uplo == 'L' or ' | | | |
| l', | | | |
| * the leading m x m part stores the lower triangular part of the | | | |
| * symmetric matrix and the strictly upper triangular part is not | | | |
| * referenced. If side == 'R' or 'r' the leading n x n part of array | | | |
| A | | | |
| * must contain the symmetric matrix, such that when uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n part stores the upper triangular part of the | | | |
| * symmetric matrix and the strictly lower triangular part of A is n | | | |
| ot | | | |
| * referenced, and when uplo == 'L' or 'l', the leading n x n part | | | |
| * stores the lower triangular part of the symmetric matrix and the | | | |
| * strictly upper triangular part is not referenced. | | | |
| * lda leading dimension of A. When side == 'L' or 'l', it must be at le | | | |
| ast | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * B single precision array of dimensions (ldb, n). On entry, the lead | | | |
| ing | | | |
| * m x n part of the array contains the matrix B. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * beta single precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input | | | |
| * C single precision array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m) | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * B + beta * C, or C = alpha * | | | |
| * B * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssymm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSsymm (char side, char uplo, int m, int n, float alpha | | | |
| , | | | |
| const float *A, int lda, const float *B, int ld | | | |
| b, | | | |
| float beta, float *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSsyrk (char uplo, char trans, int n, int k, float alpha, | | | |
| * const float *A, int lda, float beta, float *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank k operations | | | |
| * | | | |
| * C = alpha * A * transpose(A) + beta * C, or | | | |
| * C = alpha * transpose(A) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are single precision scalars. C is an n x n symmetric mat | | | |
| rix | | | |
| * consisting of single precision elements and stored in either lower or | | | |
| * upper storage mode. A is a matrix consisting of single precision element | | | |
| s | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be inferred | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * inferred from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| C = | | | |
| * alpha * A * transpose(A) + beta * C. If trans == 'T', 't', 'C', or 'c | | | |
| ', | | | |
| * C = alpha * transpose(A) * A + beta * C. | | | |
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of columns of matrix | | | |
| A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
| * alpha single precision scalar multiplier applied to A * transpose(A) or | | | |
| * transpose(A) * A. | | | |
| * A single precision array of dimensions (lda, ka), where ka is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the | | | |
| * matrix A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1, k). | | | |
| * beta single precision scalar multiplier applied to C. If beta is zero | | | |
| , C | | | |
| * does not have to be a valid input | | | |
| * C single precision array of dimensions (ldc, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * transpose(A) + beta * C, or | | | |
| C = | | | |
| * alpha * transpose(A) * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssyrk.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSsyrk (char uplo, char trans, int n, int k, float alph | | | |
| a, | | | |
| const float *A, int lda, float beta, float *C, | | | |
| int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasSsyr2k (char uplo, char trans, int n, int k, float alpha, | | | |
| * const float *A, int lda, const float *B, int ldb, | | | |
| * float beta, float *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank 2k operations | | | |
| * | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * C, o | | | |
| r | | | |
| * C = alpha * transpose(A) * B + alpha * transpose(B) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are single precision scalars. C is an n x n symmetric mat | | | |
| rix | | | |
| * consisting of single precision elements and stored in either lower or up | | | |
| per | | | |
| * storage mode. A and B are matrices consisting of single precision elemen | | | |
| ts | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be inferred | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * inferred from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * | | | |
| C, | | | |
| * If trans == 'T', 't', 'C', or 'c', C = alpha * transpose(A) * B + | | | |
| * alpha * transpose(B) * A + beta * C. | | | |
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of columns of matrix | | | |
| A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
| * alpha single precision scalar multiplier. | | | |
| * A single precision array of dimensions (lda, ka), where ka is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1,k). | | | |
| * B single precision array of dimensions (ldb, kb), where kb is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array B must contain the matrix B, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * B. | | | |
| * ldb leading dimension of B. When trans == 'N' or 'n' then ldb must be | | | |
| at | | | |
| * least max(1, n). Otherwise ldb must be at least max(1, k). | | | |
| * beta single precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input. | | | |
| * C single precision array of dimensions (ldc, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. Must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to alpha*A*transpose(B) + alpha*B*transpose(A) | | | |
| + | | | |
| * beta*C or alpha*transpose(A)*B + alpha*transpose(B)*A + beta*C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ssyr2k.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasSsyr2k (char uplo, char trans, int n, int k, float alp | | | |
| ha, | | | |
| const float *A, int lda, const float *B, int l | | | |
| db, | | | |
| float beta, float *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasStrmm (char side, char uplo, char transa, char diag, int m, int n, | | | |
| * float alpha, const float *A, int lda, const float *B, int l | | | |
| db) | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * B = alpha * op(A) * B, or B = alpha * B * op(A) | | | |
| * | | | |
| * where alpha is a single-precision scalar, B is an m x n matrix composed | | | |
| * of single precision elements, and A is a unit or non-unit, upper or lowe | | | |
| r, | | | |
| * triangular matrix composed of single precision elements. op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) | | | |
| * | | | |
 * Matrices A and B are stored in column major format, and lda and ldb are
 * the leading dimensions of the two-dimensional arrays that contain A and
 * B, respectively.
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) multiplies B from the left or right. | | | |
| * If side = 'L' or 'l', then B = alpha * op(A) * B. If side = | | | |
| * 'R' or 'r', then B = alpha * B * op(A). | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', A is an upper triangular matrix. | | | |
| * If uplo = 'L' or 'l', A is a lower triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in the matrix | | | |
| * multiplication. If transa = 'N' or 'n', then op(A) = A. If | | | |
| * transa = 'T', 't', 'C', or 'c', then op(A) = transpose(A). | | | |
| * diag specifies whether or not A is unit triangular. If diag = 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or | | | |
| * 'n', A is not assumed to be unit triangular. | | | |
| * m the number of rows of matrix B. m must be at least zero. | | | |
| * n the number of columns of matrix B. n must be at least zero. | | | |
| * alpha single precision scalar multiplier applied to op(A)*B, or | | | |
| * B*op(A), respectively. If alpha is zero no accesses are made | | | |
| * to matrix A, and no read accesses are made to matrix B. | | | |
| * A single precision array of dimensions (lda, k). k = m if side = | | | |
| * 'L' or 'l', k = n if side = 'R' or 'r'. If uplo = 'U' or 'u' | | | |
| * the leading k x k upper triangular part of the array A must | | | |
| * contain the upper triangular matrix, and the strictly lower | | | |
| * triangular part of A is not referenced. If uplo = 'L' or 'l' | | | |
| * the leading k x k lower triangular part of the array A must | | | |
| * contain the lower triangular matrix, and the strictly upper | | | |
 *        triangular part of A is not referenced. When diag = 'U' or 'u'
 *        the diagonal elements of A are not referenced and are assumed
 *        to be unity.
| * lda leading dimension of A. When side = 'L' or 'l', it must be at | | | |
| * least max(1,m) and at least max(1,n) otherwise | | | |
| * B single precision array of dimensions (ldb, n). On entry, the | | | |
| * leading m x n part of the array contains the matrix B. It is | | | |
| * overwritten with the transformed matrix on exit. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B updated according to B = alpha * op(A) * B or B = alpha * B * op | | | |
| (A) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/strmm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
/* Single-precision triangular matrix-matrix multiply:
   B = alpha*op(A)*B or B = alpha*B*op(A); B is overwritten with the result. */
void CUBLASAPI cublasStrmm (char side, char uplo, char transa, char diag,
                            int m, int n, float alpha, const float *A, int lda,
                            float *B, int ldb);
| | | | |
| /* | | | |
| * void | | | |
| * cublasStrsm (char side, char uplo, char transa, char diag, int m, int n, | | | |
| * float alpha, const float *A, int lda, float *B, int ldb) | | | |
| * | | | |
| * solves one of the matrix equations | | | |
| * | | | |
| * op(A) * X = alpha * B, or X * op(A) = alpha * B, | | | |
| * | | | |
| * where alpha is a single precision scalar, and X and B are m x n matrices | | | |
| * that are composed of single precision elements. A is a unit or non-unit, | | | |
| * upper or lower triangular matrix, and op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) | | | |
| * | | | |
| * The result matrix X overwrites input matrix B; that is, on exit the resu | | | |
| lt | | | |
 * is stored in B. Matrices A and B are stored in column major format, and
 * lda and ldb are the leading dimensions of the two-dimensional arrays
 * that contain A and B, respectively.
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) appears on the left or right of X as | | | |
| * follows: side = 'L' or 'l' indicates solve op(A) * X = alpha * B. | | | |
| * side = 'R' or 'r' indicates solve X * op(A) = alpha * B. | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix as follows: uplo = 'U' or 'u' indicates A is an upper | | | |
| * triangular matrix. uplo = 'L' or 'l' indicates A is a lower | | | |
| * triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in matrix multiplication | | | |
 *        as follows: If transa = 'N' or 'n', then op(A) = A. If transa =
 *        'T', 't', 'C', or 'c', then op(A) = transpose(A).
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * m specifies the number of rows of B. m must be at least zero. | | | |
| * n specifies the number of columns of B. n must be at least zero. | | | |
| * alpha is a single precision scalar to be multiplied with B. When alpha | | | |
| is | | | |
| * zero, then A is not referenced and B need not be set before entry | | | |
| . | | | |
| * A is a single precision array of dimensions (lda, k), where k is | | | |
| * m when side = 'L' or 'l', and is n when side = 'R' or 'r'. If | | | |
| * uplo = 'U' or 'u', the leading k x k upper triangular part of | | | |
| * the array A must contain the upper triangular matrix and the | | | |
| * strictly lower triangular matrix of A is not referenced. When | | | |
| * uplo = 'L' or 'l', the leading k x k lower triangular part of | | | |
| * the array A must contain the lower triangular matrix and the | | | |
| * strictly upper triangular part of A is not referenced. Note that | | | |
| * when diag = 'U' or 'u', the diagonal elements of A are not | | | |
| * referenced, and are assumed to be unity. | | | |
| * lda is the leading dimension of the two dimensional array containing | | | |
| A. | | | |
| * When side = 'L' or 'l' then lda must be at least max(1, m), when | | | |
| * side = 'R' or 'r' then lda must be at least max(1, n). | | | |
| * B is a single precision array of dimensions (ldb, n). ldb must be | | | |
| * at least max (1,m). The leading m x n part of the array B must | | | |
| * contain the right-hand side matrix B. On exit B is overwritten | | | |
| * by the solution matrix X. | | | |
| * ldb is the leading dimension of the two dimensional array containing | | | |
| B. | | | |
| * ldb must be at least max(1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B contains the solution matrix X satisfying op(A) * X = alpha * B, | | | |
| * or X * op(A) = alpha * B | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/strsm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
/* Single-precision triangular solve:
   solves op(A)*X = alpha*B or X*op(A) = alpha*B; the solution X overwrites B. */
void CUBLASAPI cublasStrsm (char side, char uplo, char transa, char diag,
                            int m, int n, float alpha, const float *A, int lda,
                            float *B, int ldb);
| | | | |
| /* ----------------- CUBLAS single complex BLAS3 functions ---------------- | | | |
| - */ | | | |
| | | | |
|
| /* | | cublasStatus_t error = cublasSgemm_v2(handle, convertToOp(transa), conve | |
| * void cublasCgemm (char transa, char transb, int m, int n, int k, | | rtToOp(transb), | |
| * cuComplex alpha, const cuComplex *A, int lda, | | m, n, k, &alpha, A, lda, B, ldb, &beta, | |
| * const cuComplex *B, int ldb, cuComplex beta, | | C, ldc); | |
| * cuComplex *C, int ldc) | | cublasSetError(error); | |
| * | | } | |
| * performs one of the matrix-matrix operations | | static __inline__ void CUBLASAPI cublasDgemm (char transa, char transb, int | |
| * | | m, int n, int k, | |
| * C = alpha * op(A) * op(B) + beta*C, | | double alpha, const double *A, int lda, | |
| * | | const double *B, int ldb, double beta, double * | |
| * where op(X) is one of | | C, | |
| * | | int ldc) | |
| * op(X) = X or op(X) = transpose or op(X) = conjg(transpose(X)) | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * alpha and beta are single-complex scalars, and A, B and C are matrices | | cublasStatus_t error = cublasDgemm_v2(handle, convertToOp(transa), conv | |
| * consisting of single-complex elements, with op(A) an m x k matrix, op(B) | | ertToOp(transb), | |
| * a k x n matrix and C an m x n matrix. | | m, n, k, &alpha, A, lda, B, ldb, &beta, | |
| * | | C, ldc); | |
| * Input | | cublasSetError(error); | |
| * ----- | | } | |
| * transa specifies op(A). If transa == 'N' or 'n', op(A) = A. If transa == | | static __inline__ void CUBLASAPI cublasCgemm (char transa, char transb, int | |
| * 'T' or 't', op(A) = transpose(A). If transa == 'C' or 'c', op(A) | | m, int n, int k, | |
| = | | | |
| * conjg(transpose(A)). | | | |
| * transb specifies op(B). If transa == 'N' or 'n', op(B) = B. If transb == | | | |
| * 'T' or 't', op(B) = transpose(B). If transb == 'C' or 'c', op(B) | | | |
| = | | | |
| * conjg(transpose(B)). | | | |
| * m number of rows of matrix op(A) and rows of matrix C. It must be a | | | |
| t | | | |
| * least zero. | | | |
| * n number of columns of matrix op(B) and number of columns of C. It | | | |
| * must be at least zero. | | | |
| * k number of columns of matrix op(A) and number of rows of op(B). It | | | |
| * must be at least zero. | | | |
| * alpha single-complex scalar multiplier applied to op(A)op(B) | | | |
| * A single-complex array of dimensions (lda, k) if transa == 'N' or | | | |
| * 'n'), and of dimensions (lda, m) otherwise. | | | |
| * lda leading dimension of A. When transa == 'N' or 'n', it must be at | | | |
| * least max(1, m) and at least max(1, k) otherwise. | | | |
| * B single-complex array of dimensions (ldb, n) if transb == 'N' or ' | | | |
| n', | | | |
| * and of dimensions (ldb, k) otherwise | | | |
| * ldb leading dimension of B. When transb == 'N' or 'n', it must be at | | | |
| * least max(1, k) and at least max(1, n) otherwise. | | | |
| * beta single-complex scalar multiplier applied to C. If beta is zero, C | | | |
| * does not have to be a valid input. | | | |
| * C single precision array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha*op(A)*op(B) + beta*C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cgemm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if any of m, n, or k are < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
/* Single-precision complex GEMM: C = alpha*op(A)*op(B) + beta*C,
   where op(X) is X, transpose(X), or conjg(transpose(X)) per transa/transb. */
void CUBLASAPI cublasCgemm (char transa, char transb, int m, int n, int k,
                            cuComplex alpha, const cuComplex *A, int lda,
                            const cuComplex *B, int ldb, cuComplex beta,
                            cuComplex *C, int ldc);
| /* | | | |
| * void | | | |
| * cublasCsymm (char side, char uplo, int m, int n, cuComplex alpha, | | | |
| * const cuComplex *A, int lda, const cuComplex *B, int ldb, | | | |
| * cuComplex beta, cuComplex *C, int ldc); | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * C = alpha * A * B + beta * C, or | | | |
| * C = alpha * B * A + beta * C, | | | |
| * | | | |
| * where alpha and beta are single precision complex scalars, A is a symmet | | | |
| ric matrix | | | |
| * consisting of single precision complex elements and stored in either low | | | |
| er or upper | | | |
| * storage mode, and B and C are m x n matrices consisting of single precis | | | |
| ion | | | |
| * complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether the symmetric matrix A appears on the left side | | | |
| * hand side or right hand side of matrix B, as follows. If side == | | | |
| 'L' | | | |
| * or 'l', then C = alpha * A * B + beta * C. If side = 'R' or 'r', | | | |
| * then C = alpha * B * A + beta * C. | | | |
| * uplo specifies whether the symmetric matrix A is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * m specifies the number of rows of the matrix C, and the number of r | | | |
| ows | | | |
| * of matrix B. It also specifies the dimensions of symmetric matrix | | | |
| A | | | |
| * when side == 'L' or 'l'. m must be at least zero. | | | |
| * n specifies the number of columns of the matrix C, and the number o | | | |
| f | | | |
| * columns of matrix B. It also specifies the dimensions of symmetri | | | |
| c | | | |
| * matrix A when side == 'R' or 'r'. n must be at least zero. | | | |
| * alpha single precision scalar multiplier applied to A * B, or B * A | | | |
| * A single precision array of dimensions (lda, ka), where ka is m whe | | | |
| n | | | |
| * side == 'L' or 'l' and is n otherwise. If side == 'L' or 'l' the | | | |
| * leading m x m part of array A must contain the symmetric matrix, | | | |
| * such that when uplo == 'U' or 'u', the leading m x m part stores | | | |
| the | | | |
| * upper triangular part of the symmetric matrix, and the strictly l | | | |
| ower | | | |
 *        triangular part of A is not referenced, and when uplo == 'L' or 'l',
 *        the leading m x m part stores the lower triangular part of the
| * symmetric matrix and the strictly upper triangular part is not | | | |
| * referenced. If side == 'R' or 'r' the leading n x n part of array | | | |
| A | | | |
| * must contain the symmetric matrix, such that when uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n part stores the upper triangular part of the | | | |
| * symmetric matrix and the strictly lower triangular part of A is n | | | |
| ot | | | |
 *        referenced, and when uplo == 'L' or 'l', the leading n x n part
| * stores the lower triangular part of the symmetric matrix and the | | | |
| * strictly upper triangular part is not referenced. | | | |
| * lda leading dimension of A. When side == 'L' or 'l', it must be at le | | | |
| ast | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * B single precision array of dimensions (ldb, n). On entry, the lead | | | |
| ing | | | |
| * m x n part of the array contains the matrix B. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * beta single precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input | | | |
| * C single precision array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m) | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * B + beta * C, or C = alpha * | | | |
| * B * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/csymm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
/* Single-precision complex symmetric matrix-matrix multiply:
   C = alpha*A*B + beta*C (side=='L') or C = alpha*B*A + beta*C (side=='R'),
   with A symmetric and stored in the uplo triangle. */
void CUBLASAPI cublasCsymm (char side, char uplo, int m, int n,
                            cuComplex alpha, const cuComplex *A, int lda,
                            const cuComplex *B, int ldb, cuComplex beta,
                            cuComplex *C, int ldc);
| /* | | | |
| * void | | | |
| * cublasChemm (char side, char uplo, int m, int n, cuComplex alpha, | | | |
| * const cuComplex *A, int lda, const cuComplex *B, int ldb, | | | |
| * cuComplex beta, cuComplex *C, int ldc); | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * C = alpha * A * B + beta * C, or | | | |
| * C = alpha * B * A + beta * C, | | | |
| * | | | |
| * where alpha and beta are single precision complex scalars, A is a hermit | | | |
| ian matrix | | | |
| * consisting of single precision complex elements and stored in either low | | | |
| er or upper | | | |
| * storage mode, and B and C are m x n matrices consisting of single precis | | | |
| ion | | | |
| * complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether the hermitian matrix A appears on the left side | | | |
| * hand side or right hand side of matrix B, as follows. If side == | | | |
| 'L' | | | |
| * or 'l', then C = alpha * A * B + beta * C. If side = 'R' or 'r', | | | |
| * then C = alpha * B * A + beta * C. | | | |
| * uplo specifies whether the hermitian matrix A is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the hermitian matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the hermitian matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * m specifies the number of rows of the matrix C, and the number of r | | | |
| ows | | | |
| * of matrix B. It also specifies the dimensions of hermitian matrix | | | |
| A | | | |
| * when side == 'L' or 'l'. m must be at least zero. | | | |
| * n specifies the number of columns of the matrix C, and the number o | | | |
| f | | | |
| * columns of matrix B. It also specifies the dimensions of hermitia | | | |
| n | | | |
| * matrix A when side == 'R' or 'r'. n must be at least zero. | | | |
| * alpha single precision complex scalar multiplier applied to A * B, or B | | | |
| * A | | | |
| * A single precision complex array of dimensions (lda, ka), where ka | | | |
| is m when | | | |
| * side == 'L' or 'l' and is n otherwise. If side == 'L' or 'l' the | | | |
| * leading m x m part of array A must contain the hermitian matrix, | | | |
| * such that when uplo == 'U' or 'u', the leading m x m part stores | | | |
| the | | | |
| * upper triangular part of the hermitian matrix, and the strictly l | | | |
| ower | | | |
 *        triangular part of A is not referenced, and when uplo == 'L' or 'l',
 *        the leading m x m part stores the lower triangular part of the
| * hermitian matrix and the strictly upper triangular part is not | | | |
| * referenced. If side == 'R' or 'r' the leading n x n part of array | | | |
| A | | | |
| * must contain the hermitian matrix, such that when uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n part stores the upper triangular part of the | | | |
| * hermitian matrix and the strictly lower triangular part of A is n | | | |
| ot | | | |
 *        referenced, and when uplo == 'L' or 'l', the leading n x n part
| * stores the lower triangular part of the hermitian matrix and the | | | |
| * strictly upper triangular part is not referenced. The imaginary p | | | |
| arts | | | |
| * of the diagonal elements need not be set, they are assumed to be | | | |
| zero. | | | |
| * lda leading dimension of A. When side == 'L' or 'l', it must be at le | | | |
| ast | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * B single precision complex array of dimensions (ldb, n). On entry, | | | |
| the leading | | | |
| * m x n part of the array contains the matrix B. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * beta single precision complex scalar multiplier applied to C. If beta | | | |
| is zero, C | | | |
| * does not have to be a valid input | | | |
| * C single precision complex array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m) | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * B + beta * C, or C = alpha * | | | |
| * B * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/chemm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
/* Single-precision complex Hermitian matrix-matrix multiply:
   C = alpha*A*B + beta*C (side=='L') or C = alpha*B*A + beta*C (side=='R'),
   with A Hermitian and stored in the uplo triangle. */
void CUBLASAPI cublasChemm (char side, char uplo, int m, int n,
                            cuComplex alpha, const cuComplex *A, int lda,
                            const cuComplex *B, int ldb, cuComplex beta,
                            cuComplex *C, int ldc);
| /* | | { | |
| * void | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * cublasCsyrk (char uplo, char trans, int n, int k, cuComplex alpha, | | cublasStatus_t error = cublasCgemm_v2(handle, convertToOp(transa), conv | |
| * const cuComplex *A, int lda, cuComplex beta, cuComplex *C, | | ertToOp(transb), | |
| int ldc) | | m, n, k, &alpha, A, lda, B, ldb, &beta, | |
| * | | C, ldc); | |
| * performs one of the symmetric rank k operations | | cublasSetError(error); | |
| * | | | |
| * C = alpha * A * transpose(A) + beta * C, or | | | |
| * C = alpha * transpose(A) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are single precision complex scalars. C is an n x n symme | | | |
| tric matrix | | | |
| * consisting of single precision complex elements and stored in either low | | | |
| er or | | | |
| * upper storage mode. A is a matrix consisting of single precision complex | | | |
| elements | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
 * trans  specifies the operation to be performed. If trans == 'N' or 'n', C =
 *        alpha * A * transpose(A) + beta * C. If trans == 'T', 't', 'C', or
 *        'c', C = alpha * transpose(A) * A + beta * C.
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
 * k      If trans == 'N' or 'n', k specifies the number of columns of matrix
 *        A. If trans == 'T', 't', 'C', or 'c', k specifies the number of rows
 *        of matrix A. k must be at least zero.
| * alpha single precision complex scalar multiplier applied to A * transpo | | | |
| se(A) or | | | |
| * transpose(A) * A. | | | |
| * A single precision complex array of dimensions (lda, ka), where ka | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contains the | | | |
| * matrix A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1, k). | | | |
 * beta   single precision complex scalar multiplier applied to C. If beta is
 *        zero, C does not have to be a valid input
| * C single precision complex array of dimensions (ldc, n). If uplo = | | | |
| 'U' or 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo = 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * transpose(A) + beta * C, or | | | |
| C = | | | |
| * alpha * transpose(A) * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/csyrk.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| | | | |
/* Single-precision complex symmetric rank-k update:
   C = alpha*A*transpose(A) + beta*C or C = alpha*transpose(A)*A + beta*C;
   only the uplo triangle of C is referenced/updated. */
void CUBLASAPI cublasCsyrk (char uplo, char trans, int n, int k,
                            cuComplex alpha, const cuComplex *A, int lda,
                            cuComplex beta, cuComplex *C, int ldc);
| /* | | | |
| * void | | | |
| * cublasCherk (char uplo, char trans, int n, int k, float alpha, | | | |
| * const cuComplex *A, int lda, float beta, cuComplex *C, int | | | |
| ldc) | | | |
| * | | | |
| * performs one of the hermitian rank k operations | | | |
| * | | | |
| * C = alpha * A * conjugate(transpose(A)) + beta * C, or | | | |
| * C = alpha * conjugate(transpose(A)) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are single precision real scalars. C is an n x n hermitia | | | |
| n matrix | | | |
| * consisting of single precision complex elements and stored in either low | | | |
| er or | | | |
| * upper storage mode. A is a matrix consisting of single precision complex | | | |
| elements | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the hermitian matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the hermitian matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the hermitian matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| C = | | | |
| * alpha * A * conjugate(transpose(A)) + beta * C. If trans == 'T', | | | |
| 't', 'C', or 'c', | | | |
| * C = alpha * conjugate(transpose(A)) * A + beta * C. | | | |
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of columns of matr | | | |
| ix A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
| * alpha single precision scalar multiplier applied to A * conjugate(trans | | | |
| pose(A)) or | | | |
| * conjugate(transpose(A)) * A. | | | |
| * A single precision complex array of dimensions (lda, ka), where ka | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contains the | | | |
| * matrix A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1, k). | | | |
| * beta single precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input. | | | |
| * C single precision complex array of dimensions (ldc, n). If uplo = | | | |
| 'U' or 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the hermitian matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo = 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the hermitian matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero, and on exit they | | | |
| * are set to zero. | | | |
| * ldc leading dimension of C. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * conjugate(transpose(A)) + be | | | |
| ta * C, or C = | | | |
| * alpha * conjugate(transpose(A)) * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cherk.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCherk (char uplo, char trans, int n, int k, | | | |
| float alpha, const cuComplex *A, int lda, | | | |
| float beta, cuComplex *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCsyr2k (char uplo, char trans, int n, int k, cuComplex alpha, | | | |
| * const cuComplex *A, int lda, const cuComplex *B, int ldb, | | | |
| * cuComplex beta, cuComplex *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank 2k operations | | | |
| * | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * C, o | | | |
| r | | | |
| * C = alpha * transpose(A) * B + alpha * transpose(B) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are single precision complex scalars. C is an n x n symme | | | |
| tric matrix | | | |
| * consisting of single precision complex elements and stored in either low | | | |
| er or upper | | | |
| * storage mode. A and B are matrices consisting of single precision comple | | | |
| x elements | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be inferred | | | |
| * from those in the upper triangular part. If uplo == 'L' or 'l', | | | |
| * only the lower triangular part of the symmetric matrix is to be | | | |
| * referenced, and the elements of the strictly upper triangular part | | | |
| * are to be inferred from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * | | | |
| C, | | | |
| * If trans == 'T', 't', 'C', or 'c', C = alpha * transpose(A) * B + | | | |
| * alpha * transpose(B) * A + beta * C. | | | |
| * n specifies the number of rows and the number of columns of matrix | | | |
| * C. If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of columns of | | | |
| * matrix A. If trans == 'T', 't', 'C', or 'c', k specifies the | | | |
| * number of rows of matrix A. k must be at least zero. | | | |
| * alpha single precision complex scalar multiplier. | | | |
| * A single precision complex array of dimensions (lda, ka), where ka | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1,k). | | | |
| * B single precision complex array of dimensions (lda, kb), where kb | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array B must contain the matrix B, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * B. | | | |
| * ldb leading dimension of B. When trans == 'N' or 'n' then ldb must be | | | |
| at | | | |
| * least max(1, n). Otherwise ldb must be at least max(1, k). | | | |
| * beta single precision complex scalar multiplier applied to C. If beta | | | |
| is zero, C | | | |
| * does not have to be a valid input. | | | |
| * C single precision complex array of dimensions (ldc, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. Must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to alpha*A*transpose(B) + alpha*B*transpose(A) | | | |
| + | | | |
| * beta*C or alpha*transpose(A)*B + alpha*transpose(B)*A + beta*C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/csyr2k.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCsyr2k (char uplo, char trans, int n, int k, | | | |
| cuComplex alpha, const cuComplex *A, int lda, | | | |
| const cuComplex *B, int ldb, cuComplex beta, | | | |
| cuComplex *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasCher2k (char uplo, char trans, int n, int k, cuComplex alpha, | | | |
| * const cuComplex *A, int lda, const cuComplex *B, int ldb, | | | |
| * float beta, cuComplex *C, int ldc) | | | |
| * | | | |
| * performs one of the hermitian rank 2k operations | | | |
| * | | | |
| * C = alpha * A * conjugate(transpose(B)) | | | |
| * + conjugate(alpha) * B * conjugate(transpose(A)) | | | |
| * + beta * C , | | | |
| * or | | | |
| * C = alpha * conjugate(transpose(A)) * B | | | |
| * + conjugate(alpha) * conjugate(transpose(B)) * A | | | |
| * + beta * C. | | | |
| * | | | |
| * Alpha is a single precision complex scalar whereas Beta is a single | | | |
| * precision real scalar. | | | |
| * C is an n x n hermitian matrix consisting of single precision complex el | | | |
| ements | | | |
| * and stored in either lower or upper storage mode. A and B are matrices c | | | |
| onsisting | | | |
| * of single precision complex elements with dimension of n x k in the firs | | | |
| t case, | | | |
| * and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the hermitian matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the hermitian matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be inferred | | | |
| * from those in the upper triangular part. If uplo == 'L' or 'l', | | | |
| * only the lower triangular part of the hermitian matrix is to be | | | |
| * referenced, and the elements of the strictly upper triangular part | | | |
| * are to be inferred from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| * C = alpha * A * conjugate(transpose(B)) | | | |
| * + conjugate(alpha) * B * conjugate(transpose(A)) | | | |
| * + beta * C . | | | |
| * If trans == 'T', 't', 'C', or 'c', | | | |
| * C = alpha * conjugate(transpose(A)) * B | | | |
| * + conjugate(alpha) * conjugate(transpose(B)) * A | | | |
| * + beta * C. | | | |
| * n specifies the number of rows and the number of columns of matrix | | | |
| * C. If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of columns of | | | |
| * matrix A. If trans == 'T', 't', 'C', or 'c', k specifies the | | | |
| * number of rows of matrix A. k must be at least zero. | | | |
| * alpha single precision complex scalar multiplier. | | | |
| * A single precision complex array of dimensions (lda, ka), where ka | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1,k). | | | |
| * B single precision complex array of dimensions (lda, kb), where kb | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array B must contain the matrix B, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * B. | | | |
| * ldb leading dimension of B. When trans == 'N' or 'n' then ldb must be | | | |
| at | | | |
| * least max(1, n). Otherwise ldb must be at least max(1, k). | | | |
| * beta single precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input. | | | |
| * C single precision complex array of dimensions (ldc, n). If uplo == | | | |
| 'U' or 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the hermitian matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the hermitian matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero, and on exit they | | | |
| * are set to zero. | | | |
| * ldc leading dimension of C. Must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to alpha*A*conjugate(transpose(B)) + | | | |
| * + conjugate(alpha)*B*conjugate(transpose(A)) + beta*C or | | | |
| * alpha*conjugate(transpose(A))*B + conjugate(alpha)*conjugate(tran | | | |
| spose(B))*A | | | |
| * + beta*C. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/cher2k.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCher2k (char uplo, char trans, int n, int k, | | | |
| cuComplex alpha, const cuComplex *A, int lda, | | | |
| const cuComplex *B, int ldb, float beta, | | | |
| cuComplex *C, int ldc); | | | |
| /* | | | |
| * void | | | |
| * cublasCtrmm (char side, char uplo, char transa, char diag, int m, int n, | | | |
| * cuComplex alpha, const cuComplex *A, int lda, const cuCompl | | | |
| ex *B, | | | |
| * int ldb) | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * B = alpha * op(A) * B, or B = alpha * B * op(A) | | | |
| * | | | |
| * where alpha is a single-precision complex scalar, B is an m x n matrix c | | | |
| omposed | | | |
| * of single precision complex elements, and A is a unit or non-unit, upper | | | |
| or lower, | | | |
| * triangular matrix composed of single precision complex elements. op(A) i | | | |
| s one of | | | |
| * | | | |
| * op(A) = A , op(A) = transpose(A) or op(A) = conjugate(transpose(A)) | | | |
| * | | | |
| * Matrices A and B are stored in column major format, and lda and ldb are | | | |
| * the leading dimensions of the two-dimensional arrays that contain A and | | | |
| * B, respectively. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) multiplies B from the left or right. | | | |
| * If side = 'L' or 'l', then B = alpha * op(A) * B. If side = | | | |
| * 'R' or 'r', then B = alpha * B * op(A). | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', A is an upper triangular matrix. | | | |
| * If uplo = 'L' or 'l', A is a lower triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in the matrix | | | |
| * multiplication. If transa = 'N' or 'n', then op(A) = A. If | | | |
| * transa = 'T' or 't', then op(A) = transpose(A). | | | |
| * If transa = 'C' or 'c', then op(A) = conjugate(transpose(A)). | | | |
| * diag specifies whether or not A is unit triangular. If diag = 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or | | | |
| * 'n', A is not assumed to be unit triangular. | | | |
| * m the number of rows of matrix B. m must be at least zero. | | | |
| * n the number of columns of matrix B. n must be at least zero. | | | |
| * alpha single precision complex scalar multiplier applied to op(A)*B, or | | | |
| * B*op(A), respectively. If alpha is zero no accesses are made | | | |
| * to matrix A, and no read accesses are made to matrix B. | | | |
| * A single precision complex array of dimensions (lda, k). k = m if s | | | |
| ide = | | | |
| * 'L' or 'l', k = n if side = 'R' or 'r'. If uplo = 'U' or 'u' | | | |
| * the leading k x k upper triangular part of the array A must | | | |
| * contain the upper triangular matrix, and the strictly lower | | | |
| * triangular part of A is not referenced. If uplo = 'L' or 'l' | | | |
| * the leading k x k lower triangular part of the array A must | | | |
| * contain the lower triangular matrix, and the strictly upper | | | |
| * triangular part of A is not referenced. When diag = 'U' or 'u' | | | |
| * the diagonal elements of A are not referenced and are assumed | | | |
| * to be unity. | | | |
| * lda leading dimension of A. When side = 'L' or 'l', it must be at | | | |
| * least max(1,m) and at least max(1,n) otherwise | | | |
| * B single precision complex array of dimensions (ldb, n). On entry, | | | |
| the | | | |
| * leading m x n part of the array contains the matrix B. It is | | | |
| * overwritten with the transformed matrix on exit. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B updated according to B = alpha * op(A) * B or B = alpha * B * op | | | |
| (A) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctrmm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtrmm (char side, char uplo, char transa, char diag, | | | |
| int m, int n, cuComplex alpha, const cuComplex | | | |
| *A, | | | |
| int lda, cuComplex *B, int ldb); | | | |
| /* | | | |
| * void | | | |
| * cublasCtrsm (char side, char uplo, char transa, char diag, int m, int n, | | | |
| * cuComplex alpha, const cuComplex *A, int lda, | | | |
| * cuComplex *B, int ldb) | | | |
| * | | | |
| * solves one of the matrix equations | | | |
| * | | | |
| * op(A) * X = alpha * B, or X * op(A) = alpha * B, | | | |
| * | | | |
| * where alpha is a single precision complex scalar, and X and B are m x n | | | |
| matrices | | | |
| * that are composed of single precision complex elements. A is a unit or n | | | |
| on-unit, | | | |
| * upper or lower triangular matrix, and op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) or op( A ) = conj( A' ). | | | |
| * | | | |
| * The result matrix X overwrites input matrix B; that is, on exit the resu | | | |
| lt | | | |
| * is stored in B. Matrices A and B are stored in column major format, and | | | |
| * lda and ldb are the leading dimensions of the two-dimensional arrays th | | | |
| at | | | |
| * contain A and B, respectively. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) appears on the left or right of X as | | | |
| * follows: side = 'L' or 'l' indicates solve op(A) * X = alpha * B. | | | |
| * side = 'R' or 'r' indicates solve X * op(A) = alpha * B. | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix as follows: uplo = 'U' or 'u' indicates A is an upper | | | |
| * triangular matrix. uplo = 'L' or 'l' indicates A is a lower | | | |
| * triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in matrix multiplication | | | |
| * as follows: If transa = 'N' or 'n', then op(A) = A. If transa = | | | |
| * 'T', 't', 'C', or 'c', then op(A) = transpose(A). | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * m specifies the number of rows of B. m must be at least zero. | | | |
| * n specifies the number of columns of B. n must be at least zero. | | | |
| * alpha is a single precision complex scalar to be multiplied with B. Whe | | | |
| n alpha is | | | |
| * zero, then A is not referenced and B need not be set before entry | | | |
| . | | | |
| * A is a single precision complex array of dimensions (lda, k), where | | | |
| k is | | | |
| * m when side = 'L' or 'l', and is n when side = 'R' or 'r'. If | | | |
| * uplo = 'U' or 'u', the leading k x k upper triangular part of | | | |
| * the array A must contain the upper triangular matrix and the | | | |
| * strictly lower triangular matrix of A is not referenced. When | | | |
| * uplo = 'L' or 'l', the leading k x k lower triangular part of | | | |
| * the array A must contain the lower triangular matrix and the | | | |
| * strictly upper triangular part of A is not referenced. Note that | | | |
| * when diag = 'U' or 'u', the diagonal elements of A are not | | | |
| * referenced, and are assumed to be unity. | | | |
| * lda is the leading dimension of the two dimensional array containing | | | |
| A. | | | |
| * When side = 'L' or 'l' then lda must be at least max(1, m), when | | | |
| * side = 'R' or 'r' then lda must be at least max(1, n). | | | |
| * B is a single precision complex array of dimensions (ldb, n). ldb m | | | |
| ust be | | | |
| * at least max (1,m). The leading m x n part of the array B must | | | |
| * contain the right-hand side matrix B. On exit B is overwritten | | | |
| * by the solution matrix X. | | | |
| * ldb is the leading dimension of the two dimensional array containing | | | |
| B. | | | |
| * ldb must be at least max(1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B contains the solution matrix X satisfying op(A) * X = alpha * B, | | | |
| * or X * op(A) = alpha * B | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ctrsm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasCtrsm (char side, char uplo, char transa, char diag, | | | |
| int m, int n, cuComplex alpha, const cuComplex | | | |
| *A, | | | |
| int lda, cuComplex *B, int ldb); | | | |
| | | | |
| void CUBLASAPI cublasXerbla (const char *srName, int info); | | | |
| | | | |
| /* ---------------- CUBLAS double-precision BLAS1 functions --------------- | | | |
| - */ | | | |
| | | | |
| /* | | | |
| * double | | | |
| * cublasDasum (int n, const double *x, int incx) | | | |
| * | | | |
| * computes the sum of the absolute values of the elements of double | | | |
| * precision vector x; that is, the result is the sum from i = 0 to n - 1 o | | | |
| f | | | |
| * abs(x[1 + i * incx]). | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns the double-precision sum of absolute values | | | |
| * (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dasum.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| double CUBLASAPI cublasDasum (int n, const double *x, int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDaxpy (int n, double alpha, const double *x, int incx, double *y, | | | |
| * int incy) | | | |
| * | | | |
| * multiplies double-precision vector x by double-precision scalar alpha | | | |
| * and adds the result to double-precision vector y; that is, it overwrites | | | |
| * double-precision y with double-precision alpha * x + y. For i = 0 to n-1 | | | |
| , | | | |
| * it replaces y[ly + i * incy] with alpha * x[lx + i * incx] + y[ly + i*in | | | |
| cy], | | | |
| * where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * incx; ly is defined i | | | |
| n a | | | |
| * similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * alpha double-precision scalar multiplier | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y double-precision result (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/daxpy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library was not initialized | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDaxpy (int n, double alpha, const double *x, int incx, | | | |
| double *y, int incy); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDcopy (int n, const double *x, int incx, double *y, int incy) | | | |
| * | | | |
| * copies the double-precision vector x to the double-precision vector y. F | | | |
| or | | | |
| * i = 0 to n-1, copies x[lx + i * incx] to y[ly + i * incy], where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and ly is defined in a similar | | | |
| * way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y contains double precision vector x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dcopy.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDcopy (int n, const double *x, int incx, double *y, | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * double | | | |
| * cublasDdot (int n, const double *x, int incx, const double *y, int incy) | | | |
| * | | | |
| * computes the dot product of two double-precision vectors. It returns the | | | |
| * dot product of the double precision vectors x and y if successful, and | | | |
| * 0.0f otherwise. It computes the sum for i = 0 to n - 1 of x[lx + i * | | | |
| * incx] * y[ly + i * incy], where lx = 1 if incx >= 0, else lx = 1 + (1 - | | | |
| n) | | | |
| * *incx, and ly is defined in a similar way using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns double-precision dot product (zero if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ddot.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to execute on GPU | | | |
| */ | | | |
| double CUBLASAPI cublasDdot (int n, const double *x, int incx, const double | | | |
| *y, | | | |
| int incy); | | | |
| | | | |
| /* | | | |
| * double | | | |
| * dnrm2 (int n, const double *x, int incx) | | | |
| * | | | |
| * computes the Euclidean norm of the double-precision n-vector x (with | | | |
| * storage increment incx). This code uses a multiphase model of | | | |
| * accumulation to avoid intermediate underflow and overflow. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * returns Euclidean norm (0 if n <= 0 or incx <= 0, or if an error occurs) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dnrm2.f | | | |
| * Reference: http://www.netlib.org/slatec/lin/dnrm2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| double CUBLASAPI cublasDnrm2 (int n, const double *x, int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDrot (int n, double *x, int incx, double *y, int incy, double sc, | | | |
| * double ss) | | | |
| * | | | |
| * multiplies a 2x2 matrix ( sc ss) with the 2xn matrix ( transpose(x) ) | | | |
| * (-ss sc) ( transpose(y) ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 ... n - 1, where lx = 1 | | | |
| if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * sc element of rotation matrix | | | |
| * ss element of rotation matrix | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated vector x (unchanged if n <= 0) | | | |
| * y rotated vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference http://www.netlib.org/blas/drot.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDrot (int n, double *x, int incx, double *y, int incy, | | | |
| double sc, double ss); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDrotg (double *host_sa, double *host_sb, double *host_sc, double * | | | |
| host_ss) | | | |
| * | | | |
| * constructs the Givens transformation | | | |
| * | | | |
| * ( sc ss ) | | | |
| * G = ( ) , sc^2 + ss^2 = 1, | | | |
| * (-ss sc ) | | | |
| * | | | |
| * which zeros the second entry of the 2-vector transpose(sa, sb). | | | |
| * | | | |
| * The quantity r = (+/-) sqrt (sa^2 + sb^2) overwrites sa in storage. The | | | |
| * value of sb is overwritten by a value z which allows sc and ss to be | | | |
| * recovered by the following algorithm: | | | |
| * | | | |
| * if z=1 set sc = 0.0 and ss = 1.0 | | | |
| * if abs(z) < 1 set sc = sqrt(1-z^2) and ss = z | | | |
| * if abs(z) > 1 set sc = 1/z and ss = sqrt(1-sc^2) | | | |
| * | | | |
| * The function drot (n, x, incx, y, incy, sc, ss) normally is called next | | | |
| * to apply the transformation to a 2 x n matrix. | | | |
| * Note that this function is provided for completeness and runs exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * sa double-precision scalar | | | |
| * sb double-precision scalar | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * sa double-precision r | | | |
| * sb double-precision z | | | |
| * sc double-precision result | | | |
| * ss double-precision result | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/drotg.f | | | |
| * | | | |
| * This function does not set any error status. | | | |
| */ | | | |
| void CUBLASAPI cublasDrotg (double *host_sa, double *host_sb, double *host_ | | | |
| sc, double *host_ss); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDrotm (int n, double *x, int incx, double *y, int incy, | | | |
| * const double* sparam) | | | |
| * | | | |
| * applies the modified Givens transformation, h, to the 2 x n matrix | | | |
| * | | | |
| * ( transpose(x) ) | | | |
| * ( transpose(y) ) | | | |
| * | | | |
| * The elements of x are in x[lx + i * incx], i = 0 to n-1, where lx = 1 if | | | |
| * incx >= 0, else lx = 1 + (1 - n) * incx, and similarly for y using ly an | | | |
| d | | | |
| * incy. With sparam[0] = sflag, h has one of the following forms: | | | |
| * | | | |
| * sflag = -1.0 sflag = 0.0 sflag = 1.0 sflag = -2.0 | | | |
| * | | | |
| * (sh00 sh01) (1.0 sh01) (sh00 1.0) (1.0 0.0) | | | |
| * h = ( ) ( ) ( ) ( ) | | | |
| * (sh10 sh11) (sh10 1.0) (-1.0 sh11) (0.0 1.0) | | | |
| * | | | |
| * Note that this function is provided for completeness and run exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * sparam 5-element vector. sparam[0] is sflag described above. sparam[1] | | | |
| * through sparam[4] contain the 2x2 rotation matrix h: sparam[1] | | | |
| * contains sh00, sparam[2] contains sh10, sparam[3] contains sh01, | | | |
| * and sparam[4] contains sh11. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x rotated vector x (unchanged if n <= 0) | | | |
| * y rotated vector y (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/drotm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDrotm(int n, double *x, int incx, double *y, int incy, | | | |
| const double* sparam); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDrotmg (double *host_sd1, double *host_sd2, double *host_sx1, cons | | | |
| t double *host_sy1, | | | |
| * double *host_sparam) | | | |
| * | | | |
| * constructs the modified Givens transformation matrix h which zeros | | | |
| * the second component of the 2-vector transpose(sqrt(sd1)*sx1,sqrt(sd2)*s | | | |
| y1). | | | |
| * With sparam[0] = sflag, h has one of the following forms: | | | |
| * | | | |
| * sflag = -1.0 sflag = 0.0 sflag = 1.0 sflag = -2.0 | | | |
| * | | | |
| * (sh00 sh01) (1.0 sh01) (sh00 1.0) (1.0 0.0) | | | |
| * h = ( ) ( ) ( ) ( ) | | | |
| * (sh10 sh11) (sh10 1.0) (-1.0 sh11) (0.0 1.0) | | | |
| * | | | |
| * sparam[1] through sparam[4] contain sh00, sh10, sh01, sh11, | | | |
| * respectively. Values of 1.0, -1.0, or 0.0 implied by the value | | | |
| * of sflag are not stored in sparam. | | | |
| * Note that this function is provided for completeness and run exclusively | | | |
| * on the Host. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * sd1 double-precision scalar | | | |
| * sd2 double-precision scalar | | | |
| * sx1 double-precision scalar | | | |
| * sy1 double-precision scalar | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * sd1 changed to represent the effect of the transformation | | | |
| * sd2 changed to represent the effect of the transformation | | | |
| * sx1 changed to represent the effect of the transformation | | | |
| * sparam 5-element vector. sparam[0] is sflag described above. sparam[1] | | | |
| * through sparam[4] contain the 2x2 rotation matrix h: sparam[1] | | | |
| * contains sh00, sparam[2] contains sh10, sparam[3] contains sh01, | | | |
| * and sparam[4] contains sh11. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/drotmg.f | | | |
| * | | | |
| * This function does not set any error status. | | | |
| */ | | | |
| void CUBLASAPI cublasDrotmg (double *host_sd1, double *host_sd2, double *ho | | | |
| st_sx1, | | | |
| const double *host_sy1, double* host_sparam); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDscal (int n, double alpha, double *x, int incx) | | | |
| * | | | |
| * replaces double-precision vector x with double-precision alpha * x. For | | | |
| * i = 0 to n-1, it replaces x[lx + i * incx] with alpha * x[lx + i * incx] | | | |
| , | | | |
| * where lx = 1 if incx >= 0, else lx = 1 + (1 - n) * incx. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vector | | | |
| * alpha double-precision scalar multiplier | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x double-precision result (unchanged if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dscal.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library was not initialized | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDscal (int n, double alpha, double *x, int incx); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDswap (int n, double *x, int incx, double *y, int incy) | | | |
| * | | | |
| * interchanges double-precision vector x with double-precision vector y. | | | |
| * For i = 0 to n - 1, it exchanges x[ix + i * incx] with y[iy + i * incy], | | | |
| * where ix = 1 if incx >= 0, else ix = 1 + (1 - n) * incx, and iy is | | | |
| * defined in a similar manner using incy. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * n number of elements in input vectors | | | |
| * x double-precision vector with n elements | | | |
| * incx storage spacing between elements of x | | | |
| * y double-precision vector with n elements | | | |
| * incy storage spacing between elements of y | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x double-precision vector y (unchanged if n <= 0) | | | |
| * y double-precision vector x (unchanged if n <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dswap.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDswap (int n, double *x, int incx, double *y, int incy | | | |
| ); | | | |
| | | | |
|
| /* | | } | |
| * int | | static __inline__ void CUBLASAPI cublasZgemm (char transa, char transb, int | |
| * idamax (int n, const double *x, int incx) | | m, int n, | |
| * | | int k, cuDoubleComplex alpha, | |
| * finds the smallest index of the maximum magnitude element of double- | | const cuDoubleComplex *A, int lda, | |
| * precision vector x; that is, the result is the first i, i = 0 to n - 1, | | const cuDoubleComplex *B, int ldb, | |
| * that maximizes abs(x[1 + i * incx]). | | cuDoubleComplex beta, cuDoubleComplex *C, | |
| * | | int ldc) | |
| * Input | | { | |
| * ----- | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * n number of elements in input vector | | cublasStatus_t error = cublasZgemm_v2(handle, convertToOp(transa), conv | |
| * x double-precision vector with n elements | | ertToOp(transb), | |
| * incx storage spacing between elements of x | | m, n, k, &alpha, A, lda, B, ldb, &beta, | |
| * | | C, ldc); | |
| * Output | | cublasSetError(error); | |
| * ------ | | | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/idamax.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIdamax (int n, const double *x, int incx); | | | |
| | | | |
|
| /* | | } | |
| * int | | /* -------------------------------------------------------*/ | |
| * idamin (int n, const double *x, int incx) | | /* SYRK */ | |
| * | | static __inline__ void CUBLASAPI cublasSsyrk (char uplo, char trans, int n, | |
| * finds the smallest index of the minimum magnitude element of double- | | int k, float alpha, | |
| * precision vector x; that is, the result is the first i, i = 0 to n - 1, | | const float *A, int lda, float beta, float *C, | |
| * that minimizes abs(x[1 + i * incx]). | | { | |
| * | | { | |
| * Input | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * ----- | | cublasStatus_t error = cublasSsyrk_v2(handle, convertToFillMode(uplo), | |
| * n number of elements in input vector | | convertToOp(trans), | |
| * x double-precision vector with n elements | | n, k, &alpha, A, lda, &beta, C, ldc); | |
| * incx storage spacing between elements of x | | cublasSetError(error); | |
| * | | } | |
| * Output | | static __inline__ void CUBLASAPI cublasDsyrk (char uplo, char trans, int n, | |
| * ------ | | int k, | |
| * returns the smallest index (0 if n <= 0 or incx <= 0) | | double alpha, const double *A, int lda, | |
| * | | double beta, double *C, int ldc) | |
| * Reference: http://www.netlib.org/scilib/blass.f | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Error status for this function can be retrieved via cublasGetError(). | | cublasStatus_t error = cublasDsyrk_v2(handle, convertToFillMode(uplo), | |
| * | | convertToOp(trans), | |
| * Error Status | | n, k, &alpha, A, lda, &beta, C, ldc); | |
| * ------------ | | cublasSetError(error); | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | } | |
| d | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| int CUBLASAPI cublasIdamin (int n, const double *x, int incx); | | | |
| | | | |
|
| /* ---------------- CUBLAS double precision BLAS2 functions --------------- | | static __inline__ void CUBLASAPI cublasCsyrk (char uplo, char trans, int n, | |
| - */ | | int k, | |
| | | cuComplex alpha, const cuComplex *A, int lda, | |
| | | cuComplex beta, cuComplex *C, int ldc) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCsyrk_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| | | n, k, &alpha, A, lda, &beta, C, ldc); | |
| | | cublasSetError(error); | |
| | | } | |
| | | static __inline__ void CUBLASAPI cublasZsyrk (char uplo, char trans, int n, | |
| | | int k, | |
| | | cuDoubleComplex alpha, | |
| | | const cuDoubleComplex *A, int lda, | |
| | | cuDoubleComplex beta, | |
| | | cuDoubleComplex *C, int ldc) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZsyrk_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| | | n, k, &alpha, A, lda, &beta, C, ldc); | |
| | | cublasSetError(error); | |
| | | } | |
| | | /* ------------------------------------------------------- */ | |
| | | /* HERK */ | |
| | | static __inline__ void CUBLASAPI cublasCherk (char uplo, char trans, int n, | |
| | | int k, | |
| | | float alpha, const cuComplex *A, int lda, | |
| | | float beta, cuComplex *C, int ldc) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasCherk_v2(handle, convertToFillMode(uplo), | |
| | | convertToOp(trans), | |
| | | n, k, &alpha, A, lda, &beta, C, ldc); | |
| | | cublasSetError(error); | |
| | | | |
|
| /* | | } | |
| * cublasDgemv (char trans, int m, int n, double alpha, const double *A, | | static __inline__ void CUBLASAPI cublasZherk (char uplo, char trans, int n, | |
| * int lda, const double *x, int incx, double beta, double *y, | | int k, | |
| * int incy) | | double alpha, | |
| * | | const cuDoubleComplex *A, int lda, | |
| * performs one of the matrix-vector operations | | double beta, | |
| * | | cuDoubleComplex *C, int ldc) | |
| * y = alpha * op(A) * x + beta * y, | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * where op(A) is one of | | cublasStatus_t error = cublasZherk_v2(handle, convertToFillMode(uplo), | |
| * | | convertToOp(trans), | |
| * op(A) = A or op(A) = transpose(A) | | n, k, &alpha, A, lda, &beta, C, ldc); | |
| * | | cublasSetError(error); | |
| * where alpha and beta are double precision scalars, x and y are double | | | |
| * precision vectors, and A is an m x n matrix consisting of double precisi | | | |
| on | | | |
| * elements. Matrix A is stored in column major format, and lda is the lead | | | |
| ing | | | |
| * dimension of the two-dimensional array in which A is stored. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If trans = 'n' or 'N', op(A) = A. If trans = | | | |
| * 't', 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * alpha double precision scalar multiplier applied to op(A). | | | |
| * A double precision array of dimensions (lda, n) if trans = 'n' or | | | |
| * 'N'), and of dimensions (lda, m) otherwise. lda must be at least | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )) | | | |
| * when trans = 'N' or 'n' and at least (1 + (m - 1) * abs(incx)) | | | |
| * otherwise. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * beta double precision scalar multiplier applied to vector y. If beta | | | |
| * is zero, y is not read. | | | |
| * y double precision array of length at least (1 + (m - 1) * abs(incy | | | |
| )) | | | |
| * when trans = 'N' or 'n' and at least (1 + (n - 1) * abs(incy)) | | | |
| * otherwise. | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha * op(A) * x + beta * y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dgemv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0, or if incx or incy == | | | |
| 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDgemv (char trans, int m, int n, double alpha, | | | |
| const double *A, int lda, const double *x, | | | |
| int incx, double beta, double *y, int incy); | | | |
| | | | |
|
| /* | | } | |
| * cublasDger (int m, int n, double alpha, const double *x, int incx, | | /* ------------------------------------------------------- */ | |
| * const double *y, int incy, double *A, int lda) | | /* SYR2K */ | |
| * | | static __inline__ void CUBLASAPI cublasSsyr2k (char uplo, char trans, int n | |
| * performs the rank 1 operation | | , int k, float alpha, | |
| * | | const float *A, int lda, const float *B, int l | |
| * A = alpha * x * transpose(y) + A, | | db, | |
| * | | float beta, float *C, int ldc) | |
| * where alpha is a double precision scalar, x is an m element double | | { | |
| * precision vector, y is an n element double precision vector, and A | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * is an m by n matrix consisting of double precision elements. Matrix A | | cublasStatus_t error = cublasSsyr2k_v2(handle, convertToFillMode(uplo), | |
| * is stored in column major format, and lda is the leading dimension of | | convertToOp(trans), | |
| * the two-dimensional array used to store A. | | n, k, &alpha, A, lda, B, ldb, &beta, C, | |
| * | | ldc); | |
| * Input | | cublasSetError(error); | |
| * ----- | | | |
| * m specifies the number of rows of the matrix A. It must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. It must be at | | | |
| * least zero. | | | |
| * alpha double precision scalar multiplier applied to x * transpose(y) | | | |
| * x double precision array of length at least (1 + (m - 1) * abs(incx | | | |
| )) | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * y double precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )) | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * A double precision array of dimensions (lda, n). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * transpose(y) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dger.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDger (int m, int n, double alpha, const double *x, | | | |
| int incx, const double *y, int incy, | | | |
| double *A, int lda); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasDsyr (char uplo, int n, double alpha, const double *x, int incx, | | | |
| * double *A, int lda) | | | |
| * | | | |
| * performs the symmetric rank 1 operation | | | |
| * | | | |
| * A = alpha * x * transpose(x) + A, | | | |
| * | | | |
| * where alpha is a double precision scalar, x is an n element double | | | |
| * precision vector and A is an n x n symmetric matrix consisting of | | | |
| * double precision elements. Matrix A is stored in column major format, | | | |
| * and lda is the leading dimension of the two-dimensional array | | | |
| * containing A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or | | | |
| * the lower triangular part of array A. If uplo = 'U' or 'u', | | | |
| * then only the upper triangular part of A may be referenced. | | | |
| * If uplo = 'L' or 'l', then only the lower triangular part of | | | |
| * A may be referenced. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * alpha double precision scalar multiplier applied to x * transpose(x) | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )) | | | |
| * incx specifies the storage spacing between elements of x. incx must | | | |
| * not be zero. | | | |
| * A double precision array of dimensions (lda, n). If uplo = 'U' or | | | |
| * 'u', then A must contain the upper triangular part of a symmetric | | | |
| * matrix, and the strictly lower triangular part is not referenced. | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part | | | |
| * of a symmetric matrix, and the strictly upper triangular part is | | | |
| * not referenced. | | | |
| * lda leading dimension of the two-dimensional array containing A. lda | | | |
| * must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * transpose(x) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsyr.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsyr (char uplo, int n, double alpha, | | | |
| const double *x, int incx, double *A, | | | |
| int lda); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasDsyr2k (char uplo, char trans, int n | |
| * void cublasDsyr2 (char uplo, int n, double alpha, const double *x, int i | | , int k, | |
| ncx, | | double alpha, const double *A, int lda, | |
| * const double *y, int incy, double *A, int lda) | | const double *B, int ldb, double beta, | |
| * | | double *C, int ldc) | |
| * performs the symmetric rank 2 operation | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * A = alpha*x*transpose(y) + alpha*y*transpose(x) + A, | | cublasStatus_t error = cublasDsyr2k_v2(handle, convertToFillMode(uplo), | |
| * | | convertToOp(trans), | |
| * where alpha is a double precision scalar, x and y are n element double | | n, k, &alpha, A, lda, B, ldb, &beta, C, | |
| * precision vector and A is an n by n symmetric matrix consisting of doubl | | ldc); | |
| e | | cublasSetError(error); | |
| * precision elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | | |
| * upper triangular part of A may be referenced and the lower triang | | | |
| ular | | | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | | |
| * triangular part of A may be referenced and the upper triangular p | | | |
| art | | | |
| * of A is inferred. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision scalar multiplier applied to x * transpose(y) + | | | |
| * y * transpose(x). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs (inc | | | |
| x)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * y double precision array of length at least (1 + (n - 1) * abs (inc | | | |
| y)). | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * A double precision array of dimensions (lda, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * then A must contains the upper triangular part of a symmetric mat | | | |
| rix, | | | |
| * and the strictly lower triangular parts is not referenced. If upl | | | |
| o == | | | |
| * 'L' or 'l', then A contains the lower triangular part of a symmet | | | |
| ric | | | |
| * matrix, and the strictly upper triangular part is not referenced. | | | |
| * lda leading dimension of A. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha*x*transpose(y)+alpha*y*transpose(x | | | |
| )+A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsyr2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsyr2 (char uplo, int n, double alpha, | | | |
| const double *x, int incx, const double *y, | | | |
| int incy, double *A, int lda); | | | |
| | | | |
|
| /* | | } | |
| * void | | static __inline__ void CUBLASAPI cublasCsyr2k (char uplo, char trans, int n | |
| * cublasDspr (char uplo, int n, double alpha, const double *x, int incx, | | , int k, | |
| * double *AP) | | cuComplex alpha, const cuComplex *A, int lda, | |
| * | | const cuComplex *B, int ldb, cuComplex beta, | |
| * performs the symmetric rank 1 operation | | cuComplex *C, int ldc) | |
| * | | { | |
| * A = alpha * x * transpose(x) + A, | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasCsyr2k_v2(handle, convertToFillMode(uplo), | |
| * where alpha is a double precision scalar and x is an n element double | | convertToOp(trans), | |
| * precision vector. A is a symmetric n x n matrix consisting of double | | n, k, &alpha, A, lda, B, ldb, &beta, C, | |
| * precision elements that is supplied in packed form. | | ldc); | |
| * | | cublasSetError(error); | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | | |
| r | | | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | | |
| en | | | |
| * the lower triangular part of A is supplied in AP. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision scalar multiplier applied to x * transpose(x). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * AP double precision array with at least ((n * (n + 1)) / 2) elements | | | |
| . If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * transpose(x) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dspr.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDspr (char uplo, int n, double alpha, | | | |
| const double *x, int incx, double *AP); | | | |
| /* | | | |
| * void | | | |
| * cublasDspr2 (char uplo, int n, double alpha, const double *x, int incx, | | | |
| * const double *y, int incy, double *AP) | | | |
| * | | | |
| * performs the symmetric rank 2 operation | | | |
| * | | | |
| * A = alpha*x*transpose(y) + alpha*y*transpose(x) + A, | | | |
| * | | | |
| * where alpha is a double precision scalar, and x and y are n element doub | | | |
| le | | | |
| * precision vectors. A is a symmetric n x n matrix consisting of double | | | |
| * precision elements that is supplied in packed form. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | | |
| * upper triangular part of A may be referenced and the lower triang | | | |
| ular | | | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | | |
| * triangular part of A may be referenced and the upper triangular p | | | |
| art | | | |
| * of A is inferred. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision scalar multiplier applied to x * transpose(y) + | | | |
| * y * transpose(x). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs (inc | | | |
| x)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * y double precision array of length at least (1 + (n - 1) * abs (inc | | | |
| y)). | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * AP double precision array with at least ((n * (n + 1)) / 2) elements | | | |
| . If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored is AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'L', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha*x*transpose(y)+alpha*y*transpose(x | | | |
| )+A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dspr2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDspr2 (char uplo, int n, double alpha, | | | |
| const double *x, int incx, const double *y, | | | |
| int incy, double *AP); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasDtrsv (char uplo, char trans, char diag, int n, const double *A, | | | |
| * int lda, double *x, int incx) | | | |
| * | | | |
| * solves a system of equations op(A) * x = b, where op(A) is either A or | | | |
| * transpose(A). b and x are double precision vectors consisting of n | | | |
| * elements, and A is an n x n matrix composed of a unit or non-unit, upper | | | |
| * or lower triangular matrix. Matrix A is stored in column major format, | | | |
| * and lda is the leading dimension of the two-dimensional array containing | | | |
| * A. | | | |
| * | | | |
| * No test for singularity or near-singularity is included in this function | | | |
| . | | | |
| * Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the | | | |
| * lower triangular part of array A. If uplo = 'U' or 'u', then only | | | |
| * the upper triangular part of A may be referenced. If uplo = 'L' o | | | |
| r | | | |
| * 'l', then only the lower triangular part of A may be referenced. | | | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If transa = ' | | | |
| t', | | | |
| * 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * A is a double precision array of dimensions (lda, n). If uplo = 'U' | | | |
| * or 'u', then A must contains the upper triangular part of a symme | | | |
| tric | | | |
| * matrix, and the strictly lower triangular parts is not referenced | | | |
| . | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part o | | | |
| f | | | |
| * a symmetric matrix, and the strictly upper triangular part is not | | | |
| * referenced. | | | |
| * lda is the leading dimension of the two-dimensional array containing | | | |
| A. | | | |
| * lda must be at least max(1, n). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the n element right-hand side vector b. On e | | | |
| xit, | | | |
| * it is overwritten with the solution vector x. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtrsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtrsv (char uplo, char trans, char diag, int n, | | | |
| const double *A, int lda, double *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZsyr2k (char uplo, char trans, int n | |
| * void | | , int k, | |
| * cublasDtrmv (char uplo, char trans, char diag, int n, const double *A, | | cuDoubleComplex alpha, const cuDoubleComplex * | |
| * int lda, double *x, int incx); | | A, int lda, | |
| * | | const cuDoubleComplex *B, int ldb, cuDoubleCom | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | plex beta, | |
| = | | cuDoubleComplex *C, int ldc) | |
| = A, or op(A) = transpose(A). x is an n-element single precision vector, a | | { | |
| nd | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * A is an n x n, unit or non-unit, upper or lower, triangular matrix compo | | cublasStatus_t error = cublasZsyr2k_v2(handle, convertToFillMode(uplo), | |
| sed | | convertToOp(trans), | |
| * of single precision elements. | | n, k, &alpha, A, lda, B, ldb, &beta, C, | |
| * | | ldc); | |
| * Input | | cublasSetError(error); | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', then A is an upper triangular matri | | | |
| x. | | | |
| * If uplo = 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If transa = 'N' or 'n', op(A) = A. If trans = 'T | | | |
| ', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag = ' | | | |
| U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or 'n', | | | |
| A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * A single precision array of dimension (lda, n). If uplo = 'U' or 'u | | | |
| ', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular matrix and the strictly lower triangular par | | | |
| t | | | |
| * of A is not referenced. If uplo = 'L' or 'l', the leading n x n l | | | |
| ower | | | |
| * triangular part of the array A must contain the lower triangular | | | |
| * matrix and the strictly upper triangular part of A is not referen | | | |
| ced. | | | |
| * When diag = 'U' or 'u', the diagonal elements of A are not refere | | | |
| nced | | | |
| * either, but are assumed to be unity. | | | |
| * lda is the leading dimension of A. It must be at least max (1, n). | | | |
| * x single precision array of length at least (1 + (n - 1) * abs(incx | | | |
| ) ). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtrmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtrmv (char uplo, char trans, char diag, int n, | | | |
| const double *A, int lda, double *x, int incx); | | | |
| | | | |
|
| /* | | } | |
| * void | | /* ------------------------------------------------------- */ | |
| * cublasDgbmv (char trans, int m, int n, int kl, int ku, double alpha, | | /* HER2K */ | |
| * const double *A, int lda, const double *x, int incx, double | | static __inline__ void CUBLASAPI cublasCher2k (char uplo, char trans, int n | |
| beta, | | , int k, | |
| * double *y, int incy); | | cuComplex alpha, const cuComplex *A, int lda, | |
| * | | const cuComplex *B, int ldb, float beta, | |
| * performs one of the matrix-vector operations | | cuComplex *C, int ldc) | |
| * | | { | |
| * y = alpha*op(A)*x + beta*y, op(A)=A or op(A) = transpose(A) | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasCher2k_v2(handle, convertToFillMode(uplo), | |
| * alpha and beta are double precision scalars. x and y are double precisio | | convertToOp(trans), | |
| n | | n, k, &alpha, A, lda, B, ldb, &beta, C, | |
| * vectors. A is an m by n band matrix consisting of double precision eleme | | ldc); | |
| nts | | cublasSetError(error); | |
| * with kl sub-diagonals and ku super-diagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * m specifies the number of rows of the matrix A. m must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. n must be at lea | | | |
| st | | | |
| * zero. | | | |
| * kl specifies the number of sub-diagonals of matrix A. It must be at | | | |
| * least zero. | | | |
| * ku specifies the number of super-diagonals of matrix A. It must be a | | | |
| t | | | |
| * least zero. | | | |
| * alpha double precision scalar multiplier applied to op(A). | | | |
| * A double precision array of dimensions (lda, n). The leading | | | |
| * (kl + ku + 1) x n part of the array A must contain the band matri | | | |
| x A, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x | | | |
| * in row (ku + 1) of the array, the first super-diagonal starting a | | | |
| t | | | |
| * position 2 in row ku, the first sub-diagonal starting at position | | | |
| 1 | | | |
| * in row (ku + 2), and so on. Elements in the array A that do not | | | |
| * correspond to elements in the band matrix (such as the top left | | | |
| * ku x ku triangle) are not referenced. | | | |
| * lda leading dimension of A. lda must be at least (kl + ku + 1). | | | |
| * x double precision array of length at least (1+(n-1)*abs(incx)) whe | | | |
| n | | | |
| * trans == 'N' or 'n' and at least (1+(m-1)*abs(incx)) otherwise. | | | |
| * incx specifies the increment for the elements of x. incx must not be z | | | |
| ero. | | | |
| * beta double precision scalar multiplier applied to vector y. If beta i | | | |
| s | | | |
| * zero, y is not read. | | | |
| * y double precision array of length at least (1+(m-1)*abs(incy)) whe | | | |
| n | | | |
| * trans == 'N' or 'n' and at least (1+(n-1)*abs(incy)) otherwise. I | | | |
| f | | | |
| * beta is zero, y is not read. | | | |
| * incy On entry, incy specifies the increment for the elements of y. inc | | | |
| y | | | |
| * must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*op(A)*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dgbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDgbmv (char trans, int m, int n, int kl, int ku, | | | |
| double alpha, const double *A, int lda, | | | |
| const double *x, int incx, double beta, | | | |
| double *y, int incy); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasDtbmv (char uplo, char trans, char diag, int n, int k, const doubl | | | |
| e *A, | | | |
| * int lda, double *x, int incx) | | | |
| * | | | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | | |
| = A, | | | |
| * or op(A) = transpose(A). x is an n-element double precision vector, and | | | |
| A is | | | |
| * an n x n, unit or non-unit, upper or lower triangular band matrix compos | | | |
| ed | | | |
| * of double precision elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular ba | | | |
| nd | | | |
| * matrix. If uplo == 'U' or 'u', A is an upper triangular band matr | | | |
| ix. | | | |
| * If uplo == 'L' or 'l', A is a lower triangular band matrix. | | | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | | |
| 'T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must at least be | | | |
| * zero. | | | |
| * A double precision array of dimension (lda, n). If uplo == 'U' or ' | | | |
| u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first | | | |
| * super-diagonal starting at position 2 in row k, and so on. The to | | | |
| p | | | |
| * left k x k triangle of the array A is not referenced. If uplo == | | | |
| 'L' | | | |
| * or 'l', the leading (k + 1) x n part of the array A must constain | | | |
| the | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The botto | | | |
| m | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * lda is the leading dimension of A. It must be at least (k + 1). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n or k < 0, or if incx == 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtbmv (char uplo, char trans, char diag, int n, | | | |
| int k, const double *A, int lda, double *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZher2k (char uplo, char trans, int n | |
| * void | | , int k, | |
| * cublasDtpmv (char uplo, char trans, char diag, int n, const double *AP, | | cuDoubleComplex alpha, const cuDoubleComplex * | |
| * double *x, int incx); | | A, int lda, | |
| * | | const cuDoubleComplex *B, int ldb, double beta | |
| * performs one of the matrix-vector operations x = op(A) * x, where op(A) | | , | |
| = A, | | cuDoubleComplex *C, int ldc) | |
| * or op(A) = transpose(A). x is an n element double precision vector, and | | { | |
| A | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * is an n x n, unit or non-unit, upper or lower triangular matrix composed | | cublasStatus_t error = cublasZher2k_v2(handle, convertToFillMode(uplo), | |
| * of double precision elements. | | convertToOp(trans), | |
| * | | n, k, &alpha, A, lda, B, ldb, &beta, C, | |
| * Input | | ldc); | |
| * ----- | | cublasSetError(error); | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | } | |
| * matrix. If uplo == 'U' or 'u', then A is an upper triangular matr | | | |
| ix. | | | |
| * If uplo == 'L' or 'l', then A is a lower triangular matrix. | | | |
| * trans specifies op(A). If transa == 'N' or 'n', op(A) = A. If trans == | | | |
| 'T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A) | | | |
| * diag specifies whether or not matrix A is unit triangular. If diag == | | | |
| 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag == 'N' or 'n' | | | |
| , A | | | |
| * is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. In the current implementation n must not exceed 40 | | | |
| 70. | | | |
| * AP double precision array with at least ((n * (n + 1)) / 2) elements | | | |
| . If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * On entry, x contains the source vector. On exit, x is overwritten | | | |
| * with the result vector. | | | |
| * incx specifies the storage spacing for elements of x. incx must not be | | | |
| * zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated according to x = op(A) * x, | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtpmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or n < 0 | | | |
| * CUBLAS_STATUS_ALLOC_FAILED if function cannot allocate enough intern | | | |
| al scratch vector memory | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtpmv (char uplo, char trans, char diag, int n, | | | |
| const double *AP, double *x, int incx); | | | |
| | | | |
|
| /* | | /*------------------------------------------------------------------------* | |
| * void | | / | |
| * cublasDtpsv (char uplo, char trans, char diag, int n, const double *AP, | | /* SYMM*/ | |
| * double *X, int incx) | | static __inline__ void CUBLASAPI cublasSsymm (char side, char uplo, int m, | |
| * | | int n, float alpha, | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | const float *A, int lda, const float *B, int ld | |
| r | | b, | |
| * op(A) = A or op(A) = transpose(A). b and x are n element vectors, and A | | float beta, float *C, int ldc) | |
| is | | { | |
| * an n x n unit or non-unit, upper or lower triangular matrix. No test for | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * singularity or near-singularity is included in this routine. Such tests | | cublasStatus_t error = cublasSsymm_v2(handle, convertToSideMode(side),c | |
| * must be performed before calling this routine. | | onvertToFillMode(uplo), | |
| * | | m, n, &alpha, A, lda, B, ldb, &beta, C, | |
| * Input | | ldc ); | |
| * ----- | | cublasSetError(error); | |
| * uplo specifies whether the matrix is an upper or lower triangular matr | | } | |
| ix | | static __inline__ void CUBLASAPI cublasDsymm (char side, char uplo, int m, | |
| * as follows: If uplo == 'U' or 'u', A is an upper triangular matri | | int n, double alpha, | |
| x. | | const double *A, int lda, const double *B, int | |
| * If uplo == 'L' or 'l', A is a lower triangular matrix. | | ldb, | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | double beta, double *C, int ldc) | |
| T', | | { | |
| * 't', 'C', or 'c', op(A) = transpose(A). | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | cublasStatus_t error = cublasDsymm_v2(handle, convertToSideMode(side),c | |
| is | | onvertToFillMode(uplo), | |
| * assumed to be unit triangular; that is, diagonal elements are not | | m, n, &alpha, A, lda, B, ldb, &beta, C, | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | ldc ); | |
| * assumed to be unit triangular. | | cublasSetError(error); | |
| * n specifies the number of rows and columns of the matrix A. n must | | } | |
| be | | static __inline__ void CUBLASAPI cublasCsymm (char side, char uplo, int m, | |
| * at least zero. | | int n, cuComplex alpha, | |
| * AP double precision array with at least ((n*(n+1))/2) elements. If u | | const cuComplex *A, int lda, const cuComplex *B | |
| plo | | , int ldb, | |
| * == 'U' or 'u', the array AP contains the upper triangular matrix | | cuComplex beta, cuComplex *C, int ldc) | |
| A, | | { | |
| * packed sequentially, column by column; that is, if i <= j, then | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * A[i,j] is stored in AP[i+(j*(j+1)/2)]. If uplo == 'L' or 'l', the | | cublasStatus_t error = cublasCsymm_v2(handle, convertToSideMode(side),c | |
| * array AP contains the lower triangular matrix A, packed sequentia | | onvertToFillMode(uplo), | |
| lly, | | m, n, &alpha, A, lda, B, ldb, &beta, C, | |
| * column by column; that is, if i >= j, then A[i,j] is stored in | | ldc ); | |
| * AP[i+((2*n-j+1)*j)/2]. When diag = 'U' or 'u', the diagonal eleme | | cublasSetError(error); | |
| nts | | } | |
| * of A are not referenced and are assumed to be unity. | | static __inline__ void CUBLASAPI cublasZsymm (char side, char uplo, int m, | |
| * x double precision array of length at least (1+(n-1)*abs(incx)). | | int n, cuDoubleComplex alpha, | |
| * incx storage spacing between elements of x. It must not be zero. | | const cuDoubleComplex *A, int lda, const cuDoub | |
| * | | leComplex *B, int ldb, | |
| * Output | | cuDoubleComplex beta, cuDoubleComplex *C, int l | |
| * ------ | | dc) | |
| * x updated to contain the solution vector x that solves op(A) * x = | | { | |
| b. | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasZsymm_v2(handle, convertToSideMode(side),c | |
| * Reference: http://www.netlib.org/blas/dtpsv.f | | onvertToFillMode(uplo), | |
| * | | m, n, &alpha, A, lda, B, ldb, &beta, C, | |
| * Error status for this function can be retrieved via cublasGetError(). | | ldc ); | |
| * | | cublasSetError(error); | |
| * Error Status | | } | |
| * ------------ | | /*------------------------------------------------------------------------* | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | / | |
| d | | /* HEMM*/ | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 or n > 2035 | | static __inline__ void CUBLASAPI cublasChemm (char side, char uplo, int m, | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | int n, | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | cuComplex alpha, const cuComplex *A, int lda, | |
| */ | | const cuComplex *B, int ldb, cuComplex beta, | |
| void CUBLASAPI cublasDtpsv (char uplo, char trans, char diag, int n, | | cuComplex *C, int ldc) | |
| const double *AP, double *x, int incx); | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasChemm_v2(handle, convertToSideMode(side),c | |
| | | onvertToFillMode(uplo), | |
| | | m, n, &alpha, A, lda, B, ldb, &beta, C, | |
| | | ldc ); | |
| | | cublasSetError(error); | |
| | | | |
|
| /* | | } | |
| * void cublasDtbsv (char uplo, char trans, char diag, int n, int k, | | static __inline__ void CUBLASAPI cublasZhemm (char side, char uplo, int m, | |
| * const double *A, int lda, double *X, int incx) | | int n, | |
| * | | cuDoubleComplex alpha, const cuDoubleComplex *A | |
| * solves one of the systems of equations op(A)*x = b, where op(A) is eithe | | , int lda, | |
| r | | const cuDoubleComplex *B, int ldb, cuDoubleComp | |
| * op(A) = A or op(A) = transpose(A). b and x are n element vectors, and A | | lex beta, | |
| is | | cuDoubleComplex *C, int ldc) | |
| * an n x n unit or non-unit, upper or lower triangular band matrix with k | | { | |
| + 1 | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * diagonals. No test for singularity or near-singularity is included in th | | cublasStatus_t error = cublasZhemm_v2(handle, convertToSideMode(side),c | |
| is | | onvertToFillMode(uplo), | |
| * function. Such tests must be performed before calling this function. | | m, n, &alpha, A, lda, B, ldb, &beta, C, | |
| * | | ldc ); | |
| * Input | | cublasSetError(error); | |
| * ----- | | } | |
| * uplo specifies whether the matrix is an upper or lower triangular band | | | |
| * matrix as follows: If uplo == 'U' or 'u', A is an upper triangula | | | |
| r | | | |
| * band matrix. If uplo == 'L' or 'l', A is a lower triangular band | | | |
| * matrix. | | | |
| * trans specifies op(A). If trans == 'N' or 'n', op(A) = A. If trans == ' | | | |
| T', | | | |
| * 't', 'C', or 'c', op(A) = transpose(A). | | | |
| * diag specifies whether A is unit triangular. If diag == 'U' or 'u', A | | | |
| is | | | |
| * assumed to be unit triangular; that is, diagonal elements are not | | | |
| * read and are assumed to be unity. If diag == 'N' or 'n', A is not | | | |
| * assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. n must | | | |
| be | | | |
| * at least zero. | | | |
| * k specifies the number of super- or sub-diagonals. If uplo == 'U' o | | | |
| r | | | |
| * 'u', k specifies the number of super-diagonals. If uplo == 'L' or | | | |
| * 'l', k specifies the number of sub-diagonals. k must at least be | | | |
| * zero. | | | |
| * A double precision array of dimension (lda, n). If uplo == 'U' or ' | | | |
| u', | | | |
| * the leading (k + 1) x n part of the array A must contain the uppe | | | |
| r | | | |
| * triangular band matrix, supplied column by column, with the leadi | | | |
| ng | | | |
| * diagonal of the matrix in row (k + 1) of the array, the first sup | | | |
| er- | | | |
| * diagonal starting at position 2 in row k, and so on. The top left | | | |
| * k x k triangle of the array A is not referenced. If uplo == 'L' o | | | |
| r | | | |
| * 'l', the leading (k + 1) x n part of the array A must constain th | | | |
| e | | | |
| * lower triangular band matrix, supplied column by column, with the | | | |
| * leading diagonal of the matrix in row 1 of the array, the first | | | |
| * sub-diagonal starting at position 1 in row 2, and so on. The bott | | | |
| om | | | |
| * right k x k triangle of the array is not referenced. | | | |
| * x double precision array of length at least (1+(n-1)*abs(incx)). | | | |
| * incx storage spacing between elements of x. It must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtbsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0, n < 0 or n > 2035 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtbsv (char uplo, char trans, char diag, int n, | | | |
| int k, const double *A, int lda, double *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | /*------------------------------------------------------------------------* | |
| * void | | / | |
| * cublasDsymv (char uplo, int n, double alpha, const double *A, int lda, | | /* TRSM*/ | |
| * const double *x, int incx, double beta, double *y, int incy | | static __inline__ void CUBLASAPI cublasStrsm (char side, char uplo, char tr | |
| ) | | ansa, char diag, | |
| * | | int m, int n, float alpha, const float *A, int | |
| * performs the matrix-vector operation | | lda, | |
| * | | float *B, int ldb) | |
| * y = alpha*A*x + beta*y | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * Alpha and beta are double precision scalars, and x and y are double | | cublasStatus_t error = cublasStrsm_v2(handle, convertToSideMode(side),c | |
| * precision vectors, each with n elements. A is a symmetric n x n matrix | | onvertToFillMode(uplo), | |
| * consisting of double precision elements that is stored in either upper o | | convertToOp(transa), convertToDiagType(d | |
| r | | iag), m, n, &alpha, A, lda, B, ldb ); | |
| * lower storage mode. | | cublasSetError(error); | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the array | | | |
| A | | | |
| * is to be referenced. If uplo == 'U' or 'u', the symmetric matrix | | | |
| A | | | |
| * is stored in upper storage mode, i.e. only the upper triangular p | | | |
| art | | | |
| * of A is to be referenced while the lower triangular part of A is | | | |
| to | | | |
| * be inferred. If uplo == 'L' or 'l', the symmetric matrix A is sto | | | |
| red | | | |
| * in lower storage mode, i.e. only the lower triangular part of A i | | | |
| s | | | |
| * to be referenced while the upper triangular part of A is to be | | | |
| * inferred. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * symmetric matrix A. n must be at least zero. | | | |
| * alpha double precision scalar multiplier applied to A*x. | | | |
| * A double precision array of dimensions (lda, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n upper triangular part of the array A must conta | | | |
| in | | | |
| * the upper triangular part of the symmetric matrix and the strictl | | | |
| y | | | |
| * lower triangular part of A is not referenced. If uplo == 'L' or ' | | | |
| l', | | | |
| * the leading n x n lower triangular part of the array A must conta | | | |
| in | | | |
| * the lower triangular part of the symmetric matrix and the strictl | | | |
| y | | | |
| * upper triangular part of A is not referenced. | | | |
| * lda leading dimension of A. It must be at least max (1, n). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta double precision scalar multiplier applied to vector y. | | | |
| * y double precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsymv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsymv (char uplo, int n, double alpha, | | | |
| const double *A, int lda, const double *x, | | | |
| int incx, double beta, double *y, int incy); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasDsbmv (char uplo, int n, int k, double alpha, const double *A, int | | | |
| lda, | | | |
| * const double *x, int incx, double beta, double *y, int incy | | | |
| ) | | | |
| * | | | |
| * performs the matrix-vector operation | | | |
| * | | | |
| * y := alpha*A*x + beta*y | | | |
| * | | | |
| * alpha and beta are double precision scalars. x and y are double precisio | | | |
| n | | | |
| * vectors with n elements. A is an n by n symmetric band matrix consisting | | | |
| * of double precision elements, with k super-diagonals and the same number | | | |
| * of subdiagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the symme | | | |
| tric | | | |
| * band matrix A is being supplied. If uplo == 'U' or 'u', the upper | | | |
| * triangular part is being supplied. If uplo == 'L' or 'l', the low | | | |
| er | | | |
| * triangular part is being supplied. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * symmetric matrix A. n must be at least zero. | | | |
| * k specifies the number of super-diagonals of matrix A. Since the ma | | | |
| trix | | | |
| * is symmetric, this is also the number of sub-diagonals. k must be | | | |
| at | | | |
| * least zero. | | | |
| * alpha double precision scalar multiplier applied to A*x. | | | |
| * A double precision array of dimensions (lda, n). When uplo == 'U' o | | | |
| r | | | |
| * 'u', the leading (k + 1) x n part of array A must contain the upp | | | |
| er | | | |
| * triangular band of the symmetric matrix, supplied column by colum | | | |
| n, | | | |
| * with the leading diagonal of the matrix in row (k+1) of the array | | | |
| , | | | |
| * the first super-diagonal starting at position 2 in row k, and so | | | |
| on. | | | |
| * The top left k x k triangle of the array A is not referenced. Whe | | | |
| n | | | |
| * uplo == 'L' or 'l', the leading (k + 1) x n part of the array A m | | | |
| ust | | | |
| * contain the lower triangular band part of the symmetric matrix, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x in | | | |
| * row 1 of the array, the first sub-diagonal starting at position 1 | | | |
| in | | | |
| * row 2, and so on. The bottom right k x k triangle of the array A | | | |
| is | | | |
| * not referenced. | | | |
| * lda leading dimension of A. lda must be at least (k + 1). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta double precision scalar multiplier applied to vector y. If beta i | | | |
| s | | | |
| * zero, y is not read. | | | |
| * y double precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if k or n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsbmv (char uplo, int n, int k, double alpha, | | | |
| const double *A, int lda, const double *x, | | | |
| int incx, double beta, double *y, int incy); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasDtrsm (char side, char uplo, char tr | |
| * void | | ansa, | |
| * cublasDspmv (char uplo, int n, double alpha, const double *AP, const dou | | char diag, int m, int n, double alpha, | |
| ble *x, | | const double *A, int lda, double *B, | |
| * int incx, double beta, double *y, int incy) | | int ldb) | |
| * | | { | |
| * performs the matrix-vector operation | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * | | cublasStatus_t error = cublasDtrsm_v2(handle, convertToSideMode(side),c | |
| * y = alpha * A * x + beta * y | | onvertToFillMode(uplo), | |
| * | | convertToOp(transa), convertToDiagType(d | |
| * Alpha and beta are double precision scalars, and x and y are double | | iag), m, n, &alpha, A, lda, B, ldb ); | |
| * precision vectors with n elements. A is a symmetric n x n matrix | | cublasSetError(error); | |
| * consisting of double precision elements that is supplied in packed form. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | | |
| r | | | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | | |
| en | | | |
| * the lower triangular part of A is supplied in AP. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision scalar multiplier applied to A*x. | | | |
| * AP double precision array with at least ((n * (n + 1)) / 2) elements | | | |
| . If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| * uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the symmetric matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta double precision scalar multiplier applied to vector y; | | | |
| * y double precision array of length at least (1 + (n - 1) * abs(incy | | | |
| )). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to y = alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dspmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDspmv (char uplo, int n, double alpha, | | | |
| const double *AP, const double *x, | | | |
| int incx, double beta, double *y, int incy); | | | |
| | | | |
|
| /* ---------------- CUBLAS double precision BLAS3 functions ---------------
- */ | | } | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasCtrsm (char side, char uplo, char tr | |
| * void | | ansa, char diag, | |
| * cublasDgemm (char transa, char transb, int m, int n, int k, double alpha | | int m, int n, cuComplex alpha, const cuComplex | |
| , | | *A, | |
| * const double *A, int lda, const double *B, int ldb, | | int lda, cuComplex *B, int ldb) | |
| * double beta, double *C, int ldc) | | { | |
| * | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * computes the product of matrix A and matrix B, multiplies the result | | cublasStatus_t error = cublasCtrsm_v2(handle, convertToSideMode(side),c | |
| * by scalar alpha, and adds the sum to the product of matrix C and | | onvertToFillMode(uplo), | |
| * scalar beta. It performs one of the matrix-matrix operations: | | convertToOp(transa), convertToDiagType(d | |
| * | | iag), m, n, &alpha, A, lda, B, ldb ); | |
| * C = alpha * op(A) * op(B) + beta * C, | | cublasSetError(error); | |
| * where op(X) = X or op(X) = transpose(X), | | | |
| * | | | |
| * and alpha and beta are double-precision scalars. A, B and C are matrices | | | |
| * consisting of double-precision elements, with op(A) an m x k matrix, | | | |
| * op(B) a k x n matrix, and C an m x n matrix. Matrices A, B, and C are | | | |
| * stored in column-major format, and lda, ldb, and ldc are the leading | | | |
| * dimensions of the two-dimensional arrays containing A, B, and C. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * transa specifies op(A). If transa == 'N' or 'n', op(A) = A. | | | |
| * If transa == 'T', 't', 'C', or 'c', op(A) = transpose(A). | | | |
| * transb specifies op(B). If transb == 'N' or 'n', op(B) = B. | | | |
| * If transb == 'T', 't', 'C', or 'c', op(B) = transpose(B). | | | |
| * m number of rows of matrix op(A) and rows of matrix C; m must be at | | | |
| * least zero. | | | |
| * n number of columns of matrix op(B) and number of columns of C; | | | |
| * n must be at least zero. | | | |
| * k number of columns of matrix op(A) and number of rows of op(B); | | | |
| * k must be at least zero. | | | |
| * alpha double-precision scalar multiplier applied to op(A) * op(B). | | | |
| * A double-precision array of dimensions (lda, k) if transa == 'N' or | | | |
| * 'n', and of dimensions (lda, m) otherwise. If transa == 'N' or | | | |
| * 'n' lda must be at least max(1, m), otherwise lda must be at | | | |
| * least max(1, k). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| . | | | |
| * B double-precision array of dimensions (ldb, n) if transb == 'N' or | | | |
| * 'n', and of dimensions (ldb, k) otherwise. If transb == 'N' or | | | |
| * 'n' ldb must be at least max (1, k), otherwise ldb must be at | | | |
| * least max(1, n). | | | |
| * ldb leading dimension of two-dimensional array used to store matrix B | | | |
| . | | | |
| * beta double-precision scalar multiplier applied to C. If zero, C does | | | |
| not | | | |
| * have to be a valid input | | | |
| * C double-precision array of dimensions (ldc, n); ldc must be at lea | | | |
| st | | | |
| * max(1, m). | | | |
| * ldc leading dimension of two-dimensional array used to store matrix C | | | |
| . | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated based on C = alpha * op(A)*op(B) + beta * C. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/sgemm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS was not initialized | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m < 0, n < 0, or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDgemm (char transa, char transb, int m, int n, int k, | | | |
| double alpha, const double *A, int lda, | | | |
| const double *B, int ldb, double beta, double * | | | |
| C, | | | |
| int ldc); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasDtrsm (char side, char uplo, char transa, char diag, int m, int n, | | | |
| * double alpha, const double *A, int lda, double *B, int ldb) | | | |
| * | | | |
| * solves one of the matrix equations | | | |
| * | | | |
| * op(A) * X = alpha * B, or X * op(A) = alpha * B, | | | |
| * | | | |
| * where alpha is a double precision scalar, and X and B are m x n matrices | | | |
| * that are composed of double precision elements. A is a unit or non-unit, | | | |
| * upper or lower triangular matrix, and op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) | | | |
| * | | | |
| * The result matrix X overwrites input matrix B; that is, on exit the resu | | | |
| lt | | | |
| * is stored in B. Matrices A and B are stored in column major format, and | | | |
| * lda and ldb are the leading dimensions of the two-dimensional arrays th | | | |
| at | | | |
| * contain A and B, respectively. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) appears on the left or right of X as | | | |
| * follows: side = 'L' or 'l' indicates solve op(A) * X = alpha * B. | | | |
| * side = 'R' or 'r' indicates solve X * op(A) = alpha * B. | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix as follows: uplo = 'U' or 'u' indicates A is an upper | | | |
| * triangular matrix. uplo = 'L' or 'l' indicates A is a lower | | | |
| * triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in matrix multiplication | | | |
| * as follows: If transa = 'N' or 'n', then op(A) = A. If transa = | | | |
| * 'T', 't', 'C', or 'c', then op(A) = transpose(A). | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * m specifies the number of rows of B. m must be at least zero. | | | |
| * n specifies the number of columns of B. n must be at least zero. | | | |
| * alpha is a double precision scalar to be multiplied with B. When alpha | | | |
| is | | | |
| * zero, then A is not referenced and B need not be set before entry | | | |
| . | | | |
| * A is a double precision array of dimensions (lda, k), where k is | | | |
| * m when side = 'L' or 'l', and is n when side = 'R' or 'r'. If | | | |
| * uplo = 'U' or 'u', the leading k x k upper triangular part of | | | |
| * the array A must contain the upper triangular matrix and the | | | |
| * strictly lower triangular matrix of A is not referenced. When | | | |
| * uplo = 'L' or 'l', the leading k x k lower triangular part of | | | |
| * the array A must contain the lower triangular matrix and the | | | |
| * strictly upper triangular part of A is not referenced. Note that | | | |
| * when diag = 'U' or 'u', the diagonal elements of A are not | | | |
| * referenced, and are assumed to be unity. | | | |
| * lda is the leading dimension of the two dimensional array containing | | | |
| A. | | | |
| * When side = 'L' or 'l' then lda must be at least max(1, m), when | | | |
| * side = 'R' or 'r' then lda must be at least max(1, n). | | | |
| * B is a double precision array of dimensions (ldb, n). ldb must be | | | |
| * at least max (1,m). The leading m x n part of the array B must | | | |
| * contain the right-hand side matrix B. On exit B is overwritten | | | |
| * by the solution matrix X. | | | |
| * ldb is the leading dimension of the two dimensional array containing | | | |
| B. | | | |
| * ldb must be at least max(1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B contains the solution matrix X satisfying op(A) * X = alpha * B, | | | |
| * or X * op(A) = alpha * B | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtrsm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtrsm (char side, char uplo, char transa, | | | |
| char diag, int m, int n, double alpha, | | | |
| const double *A, int lda, double *B, | | | |
| int ldb); | | | |
| | | | |
|
| /* | | static __inline__ void CUBLASAPI cublasZtrsm (char side, char uplo, char tr | |
| * void | | ansa, | |
| * cublasZtrsm (char side, char uplo, char transa, char diag, int m, int n, | | | |
| * cuDoubleComplex alpha, const cuDoubleComplex *A, int lda, | | | |
| * cuDoubleComplex *B, int ldb) | | | |
| * | | | |
| * solves one of the matrix equations | | | |
| * | | | |
| * op(A) * X = alpha * B, or X * op(A) = alpha * B, | | | |
| * | | | |
| * where alpha is a double precision complex scalar, and X and B are m x n | | | |
| matrices | | | |
| * that are composed of double precision complex elements. A is a unit or n | | | |
| on-unit, | | | |
| * upper or lower triangular matrix, and op(A) is one of | | | |
| * | | | |
| * op(A) = A or op(A) = transpose(A) or op( A ) = conj( A' ). | | | |
| * | | | |
| * The result matrix X overwrites input matrix B; that is, on exit the resu | | | |
| lt | | | |
| * is stored in B. Matrices A and B are stored in column major format, and | | | |
| * lda and ldb are the leading dimensions of the two-dimensional arrays th | | | |
| at | | | |
| * contain A and B, respectively. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) appears on the left or right of X as | | | |
| * follows: side = 'L' or 'l' indicates solve op(A) * X = alpha * B. | | | |
| * side = 'R' or 'r' indicates solve X * op(A) = alpha * B. | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix as follows: uplo = 'U' or 'u' indicates A is an upper | | | |
| * triangular matrix. uplo = 'L' or 'l' indicates A is a lower | | | |
| * triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in matrix multiplication | | | |
| * as follows: If transa = 'N' or 'n', then op(A) = A. If transa = | | | |
| * 'T', 't', 'C', or 'c', then op(A) = transpose(A). | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * m specifies the number of rows of B. m must be at least zero. | | | |
| * n specifies the number of columns of B. n must be at least zero. | | | |
| * alpha is a double precision complex scalar to be multiplied with B. Whe | | | |
| n alpha is | | | |
| * zero, then A is not referenced and B need not be set before entry | | | |
| . | | | |
| * A is a double precision complex array of dimensions (lda, k), where | | | |
| k is | | | |
| * m when side = 'L' or 'l', and is n when side = 'R' or 'r'. If | | | |
| * uplo = 'U' or 'u', the leading k x k upper triangular part of | | | |
| * the array A must contain the upper triangular matrix and the | | | |
| * strictly lower triangular matrix of A is not referenced. When | | | |
| * uplo = 'L' or 'l', the leading k x k lower triangular part of | | | |
| * the array A must contain the lower triangular matrix and the | | | |
| * strictly upper triangular part of A is not referenced. Note that | | | |
| * when diag = 'U' or 'u', the diagonal elements of A are not | | | |
| * referenced, and are assumed to be unity. | | | |
| * lda is the leading dimension of the two dimensional array containing | | | |
| A. | | | |
| * When side = 'L' or 'l' then lda must be at least max(1, m), when | | | |
| * side = 'R' or 'r' then lda must be at least max(1, n). | | | |
| * B is a double precision complex array of dimensions (ldb, n). ldb m | | | |
| ust be | | | |
| * at least max (1,m). The leading m x n part of the array B must | | | |
| * contain the right-hand side matrix B. On exit B is overwritten | | | |
| * by the solution matrix X. | | | |
| * ldb is the leading dimension of the two dimensional array containing | | | |
| B. | | | |
| * ldb must be at least max(1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B contains the solution matrix X satisfying op(A) * X = alpha * B, | | | |
| * or X * op(A) = alpha * B | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztrsm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtrsm (char side, char uplo, char transa, | | | |
| char diag, int m, int n, cuDoubleComplex alpha, | | char diag, int m, int n, cuDoubleComplex alpha, | |
| const cuDoubleComplex *A, int lda, | | const cuDoubleComplex *A, int lda, | |
|
| cuDoubleComplex *B, int ldb); | | cuDoubleComplex *B, int ldb) | |
| | | { | |
| | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| | | cublasStatus_t error = cublasZtrsm_v2(handle, convertToSideMode(side),c | |
| | | onvertToFillMode(uplo), | |
| | | convertToOp(transa), convertToDiagType(d | |
| | | iag), m, n, &alpha, A, lda, B, ldb ); | |
| | | cublasSetError(error); | |
| | | | |
|
| /* | | } | |
| * void | | /*------------------------------------------------------------------------* | |
| * cublasDtrmm (char side, char uplo, char transa, char diag, int m, int n, | | / | |
| * double alpha, const double *A, int lda, const double *B, in | | /* TRMM*/ | |
| t ldb) | | static __inline__ void CUBLASAPI cublasStrmm (char side, char uplo, char tr | |
| * | | ansa, char diag, | |
| * performs one of the matrix-matrix operations | | int m, int n, float alpha, const float *A, int | |
| * | | lda, | |
| * B = alpha * op(A) * B, or B = alpha * B * op(A) | | float *B, int ldb) | |
| * | | { | |
| * where alpha is a double-precision scalar, B is an m x n matrix composed | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * of double precision elements, and A is a unit or non-unit, upper or lowe | | cublasStatus_t error = cublasStrmm_v2(handle, convertToSideMode(side),c | |
| r, | | onvertToFillMode(uplo), | |
| * triangular matrix composed of double precision elements. op(A) is one of | | convertToOp(transa), convertToDiagType(d | |
| * | | iag), m, n, &alpha, A, lda, B, ldb, B, ldb); | |
| * op(A) = A or op(A) = transpose(A) | | cublasSetError(error); | |
| * | | } | |
| * Matrices A and B are stored in column major format, and lda and ldb are | | static __inline__ void CUBLASAPI cublasDtrmm (char side, char uplo, char tr | |
| * the leading dimensions of the two-dimensional arrays that contain A and | | ansa, | |
| * B, respectively. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) multiplies B from the left or right. | | | |
| * If side = 'L' or 'l', then B = alpha * op(A) * B. If side = | | | |
| * 'R' or 'r', then B = alpha * B * op(A). | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', A is an upper triangular matrix. | | | |
| * If uplo = 'L' or 'l', A is a lower triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in the matrix | | | |
| * multiplication. If transa = 'N' or 'n', then op(A) = A. If | | | |
| * transa = 'T', 't', 'C', or 'c', then op(A) = transpose(A). | | | |
| * diag specifies whether or not A is unit triangular. If diag = 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or | | | |
| * 'n', A is not assumed to be unit triangular. | | | |
| * m the number of rows of matrix B. m must be at least zero. | | | |
| * n the number of columns of matrix B. n must be at least zero. | | | |
| * alpha double precision scalar multiplier applied to op(A)*B, or | | | |
| * B*op(A), respectively. If alpha is zero no accesses are made | | | |
| * to matrix A, and no read accesses are made to matrix B. | | | |
| * A double precision array of dimensions (lda, k). k = m if side = | | | |
| * 'L' or 'l', k = n if side = 'R' or 'r'. If uplo = 'U' or 'u' | | | |
| * the leading k x k upper triangular part of the array A must | | | |
| * contain the upper triangular matrix, and the strictly lower | | | |
| * triangular part of A is not referenced. If uplo = 'L' or 'l' | | | |
| * the leading k x k lower triangular part of the array A must | | | |
| * contain the lower triangular matrix, and the strictly upper | | | |
| * triangular part of A is not referenced. When diag = 'U' or 'u' | | | |
| * the diagonal elements of A are not referenced and are assumed | | | |
| * to be unity. | | | |
| * lda leading dimension of A. When side = 'L' or 'l', it must be at | | | |
| * least max(1,m) and at least max(1,n) otherwise | | | |
| * B double precision array of dimensions (ldb, n). On entry, the | | | |
| * leading m x n part of the array contains the matrix B. It is | | | |
| * overwritten with the transformed matrix on exit. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B updated according to B = alpha * op(A) * B or B = alpha * B * op | | | |
| (A) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dtrmm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDtrmm (char side, char uplo, char transa, | | | |
| char diag, int m, int n, double alpha, | | char diag, int m, int n, double alpha, | |
| const double *A, int lda, double *B, | | const double *A, int lda, double *B, | |
|
| int ldb); | | int ldb) | |
| | | { | |
| /* | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * void | | cublasStatus_t error = cublasDtrmm_v2(handle, convertToSideMode(side),c | |
| * cublasDsymm (char side, char uplo, int m, int n, double alpha, | | onvertToFillMode(uplo), | |
| * const double *A, int lda, const double *B, int ldb, | | convertToOp(transa), convertToDiagType(d | |
| * double beta, double *C, int ldc); | | iag), m, n, &alpha, A, lda, B, ldb, B, ldb ); | |
| * | | cublasSetError(error); | |
| * performs one of the matrix-matrix operations | | } | |
| * | | static __inline__ void CUBLASAPI cublasCtrmm (char side, char uplo, char tr | |
| * C = alpha * A * B + beta * C, or | | ansa, char diag, | |
| * C = alpha * B * A + beta * C, | | int m, int n, cuComplex alpha, const cuComplex | |
| * | | *A, | |
| * where alpha and beta are double precision scalars, A is a symmetric matr | | int lda, cuComplex *B, int ldb) | |
| ix | | { | |
| * consisting of double precision elements and stored in either lower or up | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| per | | cublasStatus_t error = cublasCtrmm_v2(handle, convertToSideMode(side),c | |
| * storage mode, and B and C are m x n matrices consisting of double precis | | onvertToFillMode(uplo), | |
| ion | | convertToOp(transa), convertToDiagType(d | |
| * elements. | | iag), m, n, &alpha, A, lda, B, ldb, B, ldb ); | |
| * | | cublasSetError(error); | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether the symmetric matrix A appears on the left | | | |
| * hand side or right hand side of matrix B, as follows. If side == | | | |
| 'L' | | | |
| * or 'l', then C = alpha * A * B + beta * C. If side = 'R' or 'r', | | | |
| * then C = alpha * B * A + beta * C. | | | |
| * uplo specifies whether the symmetric matrix A is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be inferred | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * inferred from those in the lower triangular part. | | | |
| * m specifies the number of rows of the matrix C, and the number of r | | | |
| ows | | | |
| * of matrix B. It also specifies the dimensions of symmetric matrix | | | |
| A | | | |
| * when side == 'L' or 'l'. m must be at least zero. | | | |
| * n specifies the number of columns of the matrix C, and the number o | | | |
| f | | | |
| * columns of matrix B. It also specifies the dimensions of symmetri | | | |
| c | | | |
| * matrix A when side == 'R' or 'r'. n must be at least zero. | | | |
| * alpha double precision scalar multiplier applied to A * B, or B * A | | | |
| * A double precision array of dimensions (lda, ka), where ka is m whe | | | |
| n | | | |
| * side == 'L' or 'l' and is n otherwise. If side == 'L' or 'l' the | | | |
| * leading m x m part of array A must contain the symmetric matrix, | | | |
| * such that when uplo == 'U' or 'u', the leading m x m part stores | | | |
| the | | | |
| * upper triangular part of the symmetric matrix, and the strictly l | | | |
| ower | | | |
| * triangular part of A is not referenced, and when uplo == 'L' or ' | | | |
| l', | | | |
| * the leading m x m part stores the lower triangular part of the | | | |
| * symmetric matrix and the strictly upper triangular part is not | | | |
| * referenced. If side == 'R' or 'r' the leading n x n part of array | | | |
| A | | | |
| * must contain the symmetric matrix, such that when uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n part stores the upper triangular part of the | | | |
| * symmetric matrix and the strictly lower triangular part of A is n | | | |
| ot | | | |
| * referenced, and when uplo == 'L' or 'l', the leading n x n part | | | |
| * stores the lower triangular part of the symmetric matrix and the | | | |
| * strictly upper triangular part is not referenced. | | | |
| * lda leading dimension of A. When side == 'L' or 'l', it must be at le | | | |
| ast | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * B double precision array of dimensions (ldb, n). On entry, the lead | | | |
| ing | | | |
| * m x n part of the array contains the matrix B. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * beta double precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input | | | |
| * C double precision array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m) | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * B + beta * C, or C = alpha * | | | |
| * B * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsymm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsymm (char side, char uplo, int m, int n, | | | |
| double alpha, const double *A, int lda, | | | |
| const double *B, int ldb, double beta, | | | |
| double *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZsymm (char side, char uplo, int m, int n, cuDoubleComplex alpha, | | | |
| * const cuDoubleComplex *A, int lda, const cuDoubleComplex *B | | | |
| , int ldb, | | | |
| * cuDoubleComplex beta, cuDoubleComplex *C, int ldc); | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * C = alpha * A * B + beta * C, or | | | |
| * C = alpha * B * A + beta * C, | | | |
| * | | | |
| * where alpha and beta are double precision complex scalars, A is a symmet | | | |
| ric matrix | | | |
| * consisting of double precision complex elements and stored in either low | | | |
| er or upper | | | |
| * storage mode, and B and C are m x n matrices consisting of double precis | | | |
| ion | | | |
| * complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether the symmetric matrix A appears on the left side | | | |
| * hand side or right hand side of matrix B, as follows. If side == | | | |
| 'L' | | | |
| * or 'l', then C = alpha * A * B + beta * C. If side = 'R' or 'r', | | | |
| * then C = alpha * B * A + beta * C. | | | |
| * uplo specifies whether the symmetric matrix A is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * m specifies the number of rows of the matrix C, and the number of r | | | |
| ows | | | |
| * of matrix B. It also specifies the dimensions of symmetric matrix | | | |
| A | | | |
| * when side == 'L' or 'l'. m must be at least zero. | | | |
| * n specifies the number of columns of the matrix C, and the number o | | | |
| f | | | |
| * columns of matrix B. It also specifies the dimensions of symmetri | | | |
| c | | | |
| * matrix A when side == 'R' or 'r'. n must be at least zero. | | | |
| * alpha double precision scalar multiplier applied to A * B, or B * A | | | |
| * A double precision array of dimensions (lda, ka), where ka is m whe | | | |
| n | | | |
| * side == 'L' or 'l' and is n otherwise. If side == 'L' or 'l' the | | | |
| * leading m x m part of array A must contain the symmetric matrix, | | | |
| * such that when uplo == 'U' or 'u', the leading m x m part stores | | | |
| the | | | |
| * upper triangular part of the symmetric matrix, and the strictly l | | | |
| ower | | | |
 *        triangular part of A is not referenced, and when uplo == 'L' or 'l',
 *        the leading m x m part stores the lower triangular part of the
| * symmetric matrix and the strictly upper triangular part is not | | | |
| * referenced. If side == 'R' or 'r' the leading n x n part of array | | | |
| A | | | |
| * must contain the symmetric matrix, such that when uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n part stores the upper triangular part of the | | | |
| * symmetric matrix and the strictly lower triangular part of A is n | | | |
| ot | | | |
 *        referenced, and when uplo == 'L' or 'l', the leading n x n part
| * stores the lower triangular part of the symmetric matrix and the | | | |
| * strictly upper triangular part is not referenced. | | | |
| * lda leading dimension of A. When side == 'L' or 'l', it must be at le | | | |
| ast | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * B double precision array of dimensions (ldb, n). On entry, the lead | | | |
| ing | | | |
| * m x n part of the array contains the matrix B. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * beta double precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input | | | |
| * C double precision array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m) | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * B + beta * C, or C = alpha * | | | |
| * B * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zsymm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZsymm (char side, char uplo, int m, int n, | | | |
| cuDoubleComplex alpha, const cuDoubleComplex *A | | | |
| , int lda, | | | |
| const cuDoubleComplex *B, int ldb, cuDoubleComp | | | |
| lex beta, | | | |
| cuDoubleComplex *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDsyrk (char uplo, char trans, int n, int k, double alpha, | | | |
| * const double *A, int lda, double beta, double *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank k operations | | | |
| * | | | |
| * C = alpha * A * transpose(A) + beta * C, or | | | |
| * C = alpha * transpose(A) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are double precision scalars. C is an n x n symmetric mat | | | |
| rix | | | |
| * consisting of double precision elements and stored in either lower or | | | |
| * upper storage mode. A is a matrix consisting of double precision element | | | |
| s | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
 * trans  specifies the operation to be performed. If trans == 'N' or 'n', C =
 *        alpha * A * transpose(A) + beta * C. If trans == 'T', 't', 'C', or 'c',
 *        C = alpha * transpose(A) * A + beta * C.
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
 * k      If trans == 'N' or 'n', k specifies the number of columns of matrix A.
 *        If trans == 'T', 't', 'C', or 'c', k specifies the number of rows of
 *        matrix A. k must be at least zero.
| * alpha double precision scalar multiplier applied to A * transpose(A) or | | | |
| * transpose(A) * A. | | | |
| * A double precision array of dimensions (lda, ka), where ka is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contains the | | | |
| * matrix A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1, k). | | | |
 * beta   double precision scalar multiplier applied to C. If beta is zero, C
| * does not have to be a valid input | | | |
| * C double precision array of dimensions (ldc, n). If uplo = 'U' or ' | | | |
| u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo = 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * transpose(A) + beta * C, or | | | |
| C = | | | |
| * alpha * transpose(A) * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsyrk.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsyrk (char uplo, char trans, int n, int k, | | | |
| double alpha, const double *A, int lda, | | | |
| double beta, double *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZsyrk (char uplo, char trans, int n, int k, cuDoubleComplex alpha, | | | |
| * const cuDoubleComplex *A, int lda, cuDoubleComplex beta, cu | | | |
| DoubleComplex *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank k operations | | | |
| * | | | |
| * C = alpha * A * transpose(A) + beta * C, or | | | |
| * C = alpha * transpose(A) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are double precision complex scalars. C is an n x n symme | | | |
| tric matrix | | | |
| * consisting of double precision complex elements and stored in either low | | | |
| er or | | | |
| * upper storage mode. A is a matrix consisting of double precision complex | | | |
| elements | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
 * trans  specifies the operation to be performed. If trans == 'N' or 'n', C =
 *        alpha * A * transpose(A) + beta * C. If trans == 'T', 't', 'C', or 'c',
 *        C = alpha * transpose(A) * A + beta * C.
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
 * k      If trans == 'N' or 'n', k specifies the number of columns of matrix A.
 *        If trans == 'T', 't', 'C', or 'c', k specifies the number of rows of
 *        matrix A. k must be at least zero.
| * alpha double precision complex scalar multiplier applied to A * transpo | | | |
| se(A) or | | | |
| * transpose(A) * A. | | | |
| * A double precision complex array of dimensions (lda, ka), where ka | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contains the | | | |
| * matrix A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1, k). | | | |
 * beta   double precision complex scalar multiplier applied to C. If beta
 *        is zero, C
| * does not have to be a valid input | | | |
| * C double precision complex array of dimensions (ldc, n). If uplo = | | | |
| 'U' or 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo = 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * transpose(A) + beta * C, or | | | |
| C = | | | |
| * alpha * transpose(A) * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zsyrk.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZsyrk (char uplo, char trans, int n, int k, | | | |
| cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *A, int lda, | | | |
| cuDoubleComplex beta, | | | |
| cuDoubleComplex *C, int ldc); | | | |
| /* | | | |
| * void | | | |
| * cublasZsyr2k (char uplo, char trans, int n, int k, cuDoubleComplex alpha | | | |
| , | | | |
| * const cuDoubleComplex *A, int lda, const cuDoubleComplex * | | | |
| B, int ldb, | | | |
| * cuDoubleComplex beta, cuDoubleComplex *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank 2k operations | | | |
| * | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * C, o | | | |
| r | | | |
| * C = alpha * transpose(A) * B + alpha * transpose(B) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are double precision complex scalars. C is an n x n symme | | | |
| tric matrix | | | |
| * consisting of double precision complex elements and stored in either low | | | |
| er or upper | | | |
| * storage mode. A and B are matrices consisting of double precision comple | | | |
| x elements | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
 *        lower triangular part of the symmetric matrix is to be referenced,
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * | | | |
| C, | | | |
| * If trans == 'T', 't', 'C', or 'c', C = alpha * transpose(A) * B + | | | |
| * alpha * transpose(B) * A + beta * C. | | | |
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of rows of matrix | | | |
| A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
 * alpha  double precision complex scalar multiplier.
| * A double precision array of dimensions (lda, ka), where ka is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1,k). | | | |
| * B double precision array of dimensions (lda, kb), where kb is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array B must contain the matrix B, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * B. | | | |
 * ldb    leading dimension of B. When trans == 'N' or 'n' then ldb must be
| at | | | |
| * least max(1, n). Otherwise ldb must be at least max(1, k). | | | |
| * beta double precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input. | | | |
| * C double precision array of dimensions (ldc, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. Must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to alpha*A*transpose(B) + alpha*B*transpose(A) | | | |
| + | | | |
| * beta*C or alpha*transpose(A)*B + alpha*transpose(B)*A + beta*C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zsyr2k.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZsyr2k (char uplo, char trans, int n, int k, | | | |
| cuDoubleComplex alpha, const cuDoubleComplex * | | | |
| A, int lda, | | | |
| const cuDoubleComplex *B, int ldb, cuDoubleCom | | | |
| plex beta, | | | |
| cuDoubleComplex *C, int ldc); | | | |
| /* | | | |
| * void | | | |
| * cublasZher2k (char uplo, char trans, int n, int k, cuDoubleComplex alpha | | | |
| , | | | |
| * const cuDoubleComplex *A, int lda, const cuDoubleComplex * | | | |
| B, int ldb, | | | |
| * double beta, cuDoubleComplex *C, int ldc) | | | |
| * | | | |
| * performs one of the hermitian rank 2k operations | | | |
| * | | | |
| * C = alpha * A * conjugate(transpose(B)) | | | |
| * + conjugate(alpha) * B * conjugate(transpose(A)) | | | |
| * + beta * C , | | | |
| * or | | | |
| * C = alpha * conjugate(transpose(A)) * B | | | |
| * + conjugate(alpha) * conjugate(transpose(B)) * A | | | |
| * + beta * C. | | | |
| * | | | |
| * Alpha is double precision complex scalar whereas Beta is a double precis | | | |
| ion real scalar. | | | |
| * C is an n x n hermitian matrix consisting of double precision complex el | | | |
| ements and | | | |
| * stored in either lower or upper storage mode. A and B are matrices consi | | | |
| sting of | | | |
| * double precision complex elements with dimension of n x k in the first c | | | |
| ase, | | | |
| * and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the hermitian matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the hermitian matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
 *        lower triangular part of the hermitian matrix is to be referenced,
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| * C = alpha * A * conjugate(transpose(B)) | | | |
| * + conjugate(alpha) * B * conjugate(transpose(A)) | | | |
| * + beta * C . | | | |
| * If trans == 'T', 't', 'C', or 'c', | | | |
| * C = alpha * conjugate(transpose(A)) * B | | | |
| * + conjugate(alpha) * conjugate(transpose(B)) * A | | | |
| * + beta * C. | | | |
| * n specifies the number of rows and the number columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of rows of matrix | | | |
| A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
 * alpha  double precision complex scalar multiplier.
| * A double precision array of dimensions (lda, ka), where ka is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1,k). | | | |
| * B double precision array of dimensions (lda, kb), where kb is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array B must contain the matrix B, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * B. | | | |
 * ldb    leading dimension of B. When trans == 'N' or 'n' then ldb must be
| at | | | |
| * least max(1, n). Otherwise ldb must be at least max(1, k). | | | |
| * beta double precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input. | | | |
| * C double precision array of dimensions (ldc, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the hermitian matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the hermitian matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero, and on exit they | | | |
| * are set to zero. | | | |
| * ldc leading dimension of C. Must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to alpha*A*conjugate(transpose(B)) + | | | |
| * + conjugate(alpha)*B*conjugate(transpose(A)) + beta*C or | | | |
| * alpha*conjugate(transpose(A))*B + conjugate(alpha)*conjugate(tran | | | |
| spose(B))*A | | | |
| * + beta*C. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zher2k.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZher2k (char uplo, char trans, int n, int k, | | | |
| cuDoubleComplex alpha, const cuDoubleComplex * | | | |
| A, int lda, | | | |
| const cuDoubleComplex *B, int ldb, double beta | | | |
| , | | | |
| cuDoubleComplex *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZher (char uplo, int n, double alpha, const cuDoubleComplex *x, in | | | |
| t incx, | | | |
| * cuDoubleComplex *A, int lda) | | | |
| * | | | |
| * performs the hermitian rank 1 operation | | | |
| * | | | |
| * A = alpha * x * conjugate(transpose(x)) + A, | | | |
| * | | | |
| * where alpha is a double precision real scalar, x is an n element double | | | |
| * precision complex vector and A is an n x n hermitian matrix consisting o | | | |
| f | | | |
| * double precision complex elements. Matrix A is stored in column major fo | | | |
| rmat, | | | |
| * and lda is the leading dimension of the two-dimensional array | | | |
| * containing A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or | | | |
| * the lower triangular part of array A. If uplo = 'U' or 'u', | | | |
| * then only the upper triangular part of A may be referenced. | | | |
| * If uplo = 'L' or 'l', then only the lower triangular part of | | | |
| * A may be referenced. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * alpha double precision real scalar multiplier applied to | | | |
| * x * conjugate(transpose(x)) | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)) | | | |
| * incx specifies the storage spacing between elements of x. incx must | | | |
| * not be zero. | | | |
| * A double precision complex array of dimensions (lda, n). If uplo = | | | |
| 'U' or | | | |
| * 'u', then A must contain the upper triangular part of a hermitian | | | |
| * matrix, and the strictly lower triangular part is not referenced. | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part | | | |
| * of a hermitian matrix, and the strictly upper triangular part is | | | |
| * not referenced. The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero, and on exit they | | | |
| * are set to zero. | | | |
| * lda leading dimension of the two-dimensional array containing A. lda | | | |
| * must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * conjugate(transpose(x)) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zher.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZher (char uplo, int n, double alpha, | | | |
| const cuDoubleComplex *x, int incx, cuDoubleComp | | | |
| lex *A, | | | |
| int lda); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZher (char uplo, int n, double alpha, const cuDoubleComplex *x, in | | | |
| t incx, | | | |
| * cuDoubleComplex *A, int lda) | | | |
| * | | | |
| * performs the hermitian rank 1 operation | | | |
| * | | | |
 *     A = alpha * x * conjugate(transpose(x)) + A,
| * | | | |
| * where alpha is a double precision real scalar, x is an n element double | | | |
| * precision complex vector and A is an n x n hermitian matrix consisting o | | | |
| f | | | |
| * double precision complex elements. Matrix A is stored in column major fo | | | |
| rmat, | | | |
| * and lda is the leading dimension of the two-dimensional array | | | |
| * containing A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or | | | |
| * the lower triangular part of array A. If uplo = 'U' or 'u', | | | |
| * then only the upper triangular part of A may be referenced. | | | |
| * If uplo = 'L' or 'l', then only the lower triangular part of | | | |
| * A may be referenced. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * alpha double precision real scalar multiplier applied to | | | |
| * x * conjugate(transpose(x)) | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)) | | | |
| * incx specifies the storage spacing between elements of x. incx must | | | |
| * not be zero. | | | |
| * A double precision complex array of dimensions (lda, n). If uplo = | | | |
| 'U' or | | | |
| * 'u', then A must contain the upper triangular part of a hermitian | | | |
| * matrix, and the strictly lower triangular part is not referenced. | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part | | | |
| * of a hermitian matrix, and the strictly upper triangular part is | | | |
| * not referenced. The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero, and on exit they | | | |
| * are set to zero. | | | |
| * lda leading dimension of the two-dimensional array containing A. lda | | | |
| * must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
 * A      updated according to A = alpha * x * conjugate(transpose(x)) + A
| * | | | |
| * Reference: http://www.netlib.org/blas/zher.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZher (char uplo, int n, double alpha, const cuDoubleCo | | | |
| mplex *x, | | | |
| int incx, cuDoubleComplex *A, int lda); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZhpr (char uplo, int n, double alpha, const cuDoubleComplex *x, in | | | |
| t incx, | | | |
| * cuDoubleComplex *AP) | | | |
| * | | | |
| * performs the hermitian rank 1 operation | | | |
| * | | | |
| * A = alpha * x * conjugate(transpose(x)) + A, | | | |
| * | | | |
| * where alpha is a double precision real scalar and x is an n element doub | | | |
| le | | | |
| * precision complex vector. A is a hermitian n x n matrix consisting of do | | | |
| uble | | | |
| * precision complex elements that is supplied in packed form. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array AP. If uplo == 'U' or 'u', then the uppe | | | |
| r | | | |
| * triangular part of A is supplied in AP. If uplo == 'L' or 'l', th | | | |
| en | | | |
| * the lower triangular part of A is supplied in AP. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision real scalar multiplier applied to x * conjugate( | | | |
| transpose(x)). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs(incx | | | |
| )). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * AP double precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| *        that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| *        uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * The imaginary parts of the diagonal elements need not be set, the | | | |
| y | | | |
| * are assumed to be zero, and on exit they are set to zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * conjugate(transpose(x)) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zhpr.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, or incx == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZhpr (char uplo, int n, double alpha, | | | |
| const cuDoubleComplex *x, int incx, cuDoubleComp | | | |
| lex *AP); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZhpr2 (char uplo, int n, cuDoubleComplex alpha, const cuDoubleComp | | | |
| lex *x, int incx, | | | |
| * const cuDoubleComplex *y, int incy, cuDoubleComplex *AP) | | | |
| * | | | |
| * performs the hermitian rank 2 operation | | | |
| * | | | |
| * A = alpha*x*conjugate(transpose(y)) + conjugate(alpha)*y*conjugate(tr | | | |
| anspose(x)) + A, | | | |
| * | | | |
| * where alpha is a double precision complex scalar, and x and y are n elem | | | |
| ent double | | | |
| * precision complex vectors. A is a hermitian n x n matrix consisting of d | | | |
| ouble | | | |
| * precision complex elements that is supplied in packed form. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | | |
| * upper triangular part of A may be referenced and the lower triang | | | |
| ular | | | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | | |
| * triangular part of A may be referenced and the upper triangular p | | | |
| art | | | |
| * of A is inferred. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision complex scalar multiplier applied to x * conjuga | | | |
| te(transpose(y)) + | | | |
| * y * conjugate(transpose(x)). | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs (incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * y double precision complex array of length at least (1 + (n - 1) * | | | |
| abs (incy)). | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * AP double precision complex array with at least ((n * (n + 1)) / 2) | | | |
| elements. If | | | |
| * uplo == 'U' or 'u', the array AP contains the upper triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| *        that is, if i <= j, then A[i,j] is stored in AP[i+(j*(j+1)/2)]. I | | | |
| f | | | |
| *        uplo == 'L' or 'l', the array AP contains the lower triangular pa | | | |
| rt | | | |
| * of the hermitian matrix A, packed sequentially, column by column; | | | |
| * that is, if i >= j, then A[i,j] is stored in AP[i+((2*n-j+1)*j)/2 | | | |
| ]. | | | |
| * The imaginary parts of the diagonal elements need not be set, the | | | |
| y | | | |
| * are assumed to be zero, and on exit they are set to zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha*x*conjugate(transpose(y)) | | | |
| * + conjugate(alpha)*y*conjugate(transpose(x | | | |
| ))+A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zhpr2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZhpr2 (char uplo, int n, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *x, int incx, const cuDou | | | |
| bleComplex *y, | | | |
| int incy, cuDoubleComplex *AP); | | | |
| | | | |
| /* | | | |
| * void cublasZher2 (char uplo, int n, cuDoubleComplex alpha, const cuDoubl | | | |
| eComplex *x, int incx, | | | |
| * const cuDoubleComplex *y, int incy, cuDoubleComplex *A | | | |
| , int lda) | | | |
| * | | | |
| * performs the hermitian rank 2 operation | | | |
| * | | | |
| * A = alpha*x*conjugate(transpose(y)) + conjugate(alpha)*y*conjugate(tr | | | |
| anspose(x)) + A, | | | |
| * | | | |
| * where alpha is a double precision complex scalar, x and y are n element | | | |
| double | | | |
| * precision complex vector and A is an n by n hermitian matrix consisting | | | |
| of double | | | |
| * precision complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the l | | | |
| ower | | | |
| * triangular part of array A. If uplo == 'U' or 'u', then only the | | | |
| * upper triangular part of A may be referenced and the lower triang | | | |
| ular | | | |
| * part of A is inferred. If uplo == 'L' or 'l', then only the lower | | | |
| * triangular part of A may be referenced and the upper triangular p | | | |
| art | | | |
| * of A is inferred. | | | |
| * n specifies the number of rows and columns of the matrix A. It must | | | |
| be | | | |
| * at least zero. | | | |
| * alpha double precision complex scalar multiplier applied to x * conjuga | | | |
| te(transpose(y)) + | | | |
| * y * conjugate(transpose(x)). | | | |
| * x double precision array of length at least (1 + (n - 1) * abs (inc | | | |
| x)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * y double precision array of length at least (1 + (n - 1) * abs (inc | | | |
| y)). | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * A double precision complex array of dimensions (lda, n). If uplo == | | | |
| 'U' or 'u', | | | |
| *        then A must contain the upper triangular part of a hermitian mat | | | |
| rix, | | | |
| * and the strictly lower triangular parts is not referenced. If upl | | | |
| o == | | | |
| * 'L' or 'l', then A contains the lower triangular part of a hermit | | | |
| ian | | | |
| * matrix, and the strictly upper triangular part is not referenced. | | | |
| * The imaginary parts of the diagonal elements need not be set, | | | |
| * they are assumed to be zero, and on exit they are set to zero. | | | |
| * | | | |
| * lda leading dimension of A. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha*x*conjugate(transpose(y)) | | | |
| * + conjugate(alpha)*y*conjugate(transpose(x | | | |
| ))+A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zher2.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZher2 (char uplo, int n, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *x, int incx, const cuDo | | | |
| ubleComplex *y, | | | |
| int incy, cuDoubleComplex *A, int lda); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasDsyr2k (char uplo, char trans, int n, int k, double alpha, | | | |
| * const double *A, int lda, const double *B, int ldb, | | | |
| * double beta, double *C, int ldc) | | | |
| * | | | |
| * performs one of the symmetric rank 2k operations | | | |
| * | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * C, o | | | |
| r | | | |
| * C = alpha * transpose(A) * B + alpha * transpose(B) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are double precision scalars. C is an n x n symmetric mat | | | |
| rix | | | |
| * consisting of double precision elements and stored in either lower or up | | | |
| per | | | |
| * storage mode. A and B are matrices consisting of double precision elemen | | | |
| ts | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the symmetric matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the symmetric matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| *        lower triangular part of the symmetric matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| * C = alpha * A * transpose(B) + alpha * B * transpose(A) + beta * | | | |
| C, | | | |
| * If trans == 'T', 't', 'C', or 'c', C = alpha * transpose(A) * B + | | | |
| * alpha * transpose(B) * A + beta * C. | | | |
| * n      specifies the number of rows and the number of columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k      If trans == 'N' or 'n', k specifies the number of columns of matrix | | | |
| A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
| * alpha double precision scalar multiplier. | | | |
| * A double precision array of dimensions (lda, ka), where ka is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1,k). | | | |
| * B double precision array of dimensions (lda, kb), where kb is k whe | | | |
| n | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array B must contain the matrix B, | | | |
| * otherwise the leading k x n part of the array must contain the ma | | | |
| trix | | | |
| * B. | | | |
| * ldb    leading dimension of B. When trans == 'N' or 'n' then ldb must be | | | |
| at | | | |
| * least max(1, n). Otherwise ldb must be at least max(1, k). | | | |
| * beta double precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input. | | | |
| * C double precision array of dimensions (ldc, n). If uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the symmetric matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo == 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the symmetric matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * ldc leading dimension of C. Must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to alpha*A*transpose(B) + alpha*B*transpose(A) | | | |
| + | | | |
| * beta*C or alpha*transpose(A)*B + alpha*transpose(B)*A + beta*C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/dsyr2k.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasDsyr2k (char uplo, char trans, int n, int k, | | | |
| double alpha, const double *A, int lda, | | | |
| const double *B, int ldb, double beta, | | | |
| double *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void cublasZgemm (char transa, char transb, int m, int n, int k, | | | |
| * cuDoubleComplex alpha, const cuDoubleComplex *A, int l | | | |
| da, | | | |
| * const cuDoubleComplex *B, int ldb, cuDoubleComplex bet | | | |
| a, | | | |
| * cuDoubleComplex *C, int ldc) | | | |
| * | | | |
| * zgemm performs one of the matrix-matrix operations | | | |
| * | | | |
| * C = alpha * op(A) * op(B) + beta*C, | | | |
| * | | | |
| * where op(X) is one of | | | |
| * | | | |
| *    op(X) = X or op(X) = transpose(X) or op(X) = conjg(transpose(X)) | | | |
| * | | | |
| * alpha and beta are double-complex scalars, and A, B and C are matrices | | | |
| * consisting of double-complex elements, with op(A) an m x k matrix, op(B) | | | |
| * a k x n matrix and C an m x n matrix. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * transa specifies op(A). If transa == 'N' or 'n', op(A) = A. If transa == | | | |
| * 'T' or 't', op(A) = transpose(A). If transa == 'C' or 'c', op(A) | | | |
| = | | | |
| * conjg(transpose(A)). | | | |
| * transb specifies op(B). If transb == 'N' or 'n', op(B) = B. If transb == | | | |
| * 'T' or 't', op(B) = transpose(B). If transb == 'C' or 'c', op(B) | | | |
| = | | | |
| * conjg(transpose(B)). | | | |
| * m number of rows of matrix op(A) and rows of matrix C. It must be a | | | |
| t | | | |
| * least zero. | | | |
| * n number of columns of matrix op(B) and number of columns of C. It | | | |
| * must be at least zero. | | | |
| * k number of columns of matrix op(A) and number of rows of op(B). It | | | |
| * must be at least zero. | | | |
| * alpha double-complex scalar multiplier applied to op(A)op(B) | | | |
| * A double-complex array of dimensions (lda, k) if transa == 'N' or | | | |
| * 'n', and of dimensions (lda, m) otherwise. | | | |
| * lda leading dimension of A. When transa == 'N' or 'n', it must be at | | | |
| * least max(1, m) and at least max(1, k) otherwise. | | | |
| * B double-complex array of dimensions (ldb, n) if transb == 'N' or ' | | | |
| n', | | | |
| * and of dimensions (ldb, k) otherwise | | | |
| * ldb leading dimension of B. When transb == 'N' or 'n', it must be at | | | |
| * least max(1, k) and at least max(1, n) otherwise. | | | |
| * beta double-complex scalar multiplier applied to C. If beta is zero, C | | | |
| * does not have to be a valid input. | | | |
| * C double precision array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha*op(A)*op(B) + beta*C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zgemm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if any of m, n, or k are < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZgemm (char transa, char transb, int m, int n, | | | |
| int k, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *A, int lda, | | | |
| const cuDoubleComplex *B, int ldb, | | | |
| cuDoubleComplex beta, cuDoubleComplex *C, | | | |
| int ldc); | | | |
| | | | |
|
| /* | | } | |
| * void | | static __inline__ void CUBLASAPI cublasZtrmm (char side, char uplo, char tr | |
| * cublasZtrmm (char side, char uplo, char transa, char diag, int m, int n, | | ansa, | |
| * cuDoubleComplex alpha, const cuDoubleComplex *A, int lda, c | | | |
| onst cuDoubleComplex *B, | | | |
| * int ldb) | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * B = alpha * op(A) * B, or B = alpha * B * op(A) | | | |
| * | | | |
| * where alpha is a double-precision complex scalar, B is an m x n matrix c | | | |
| omposed | | | |
| * of double precision complex elements, and A is a unit or non-unit, upper | | | |
| or lower, | | | |
| * triangular matrix composed of double precision complex elements. op(A) i | | | |
| s one of | | | |
| * | | | |
| * op(A) = A , op(A) = transpose(A) or op(A) = conjugate(transpose(A)) | | | |
| * | | | |
| * Matrices A and B are stored in column major format, and lda and ldb are | | | |
| * the leading dimensions of the two-dimensional arrays that contain A and | | | |
| * B, respectively. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether op(A) multiplies B from the left or right. | | | |
| * If side = 'L' or 'l', then B = alpha * op(A) * B. If side = | | | |
| * 'R' or 'r', then B = alpha * B * op(A). | | | |
| * uplo specifies whether the matrix A is an upper or lower triangular | | | |
| * matrix. If uplo = 'U' or 'u', A is an upper triangular matrix. | | | |
| * If uplo = 'L' or 'l', A is a lower triangular matrix. | | | |
| * transa specifies the form of op(A) to be used in the matrix | | | |
| * multiplication. If transa = 'N' or 'n', then op(A) = A. If | | | |
| * transa = 'T' or 't', then op(A) = transpose(A). | | | |
| * If transa = 'C' or 'c', then op(A) = conjugate(transpose(A)). | | | |
| * diag specifies whether or not A is unit triangular. If diag = 'U' | | | |
| * or 'u', A is assumed to be unit triangular. If diag = 'N' or | | | |
| * 'n', A is not assumed to be unit triangular. | | | |
| * m the number of rows of matrix B. m must be at least zero. | | | |
| * n the number of columns of matrix B. n must be at least zero. | | | |
| * alpha double precision complex scalar multiplier applied to op(A)*B, or | | | |
| * B*op(A), respectively. If alpha is zero no accesses are made | | | |
| * to matrix A, and no read accesses are made to matrix B. | | | |
| * A double precision complex array of dimensions (lda, k). k = m if s | | | |
| ide = | | | |
| * 'L' or 'l', k = n if side = 'R' or 'r'. If uplo = 'U' or 'u' | | | |
| * the leading k x k upper triangular part of the array A must | | | |
| * contain the upper triangular matrix, and the strictly lower | | | |
| * triangular part of A is not referenced. If uplo = 'L' or 'l' | | | |
| * the leading k x k lower triangular part of the array A must | | | |
| * contain the lower triangular matrix, and the strictly upper | | | |
| * triangular part of A is not referenced. When diag = 'U' or 'u' | | | |
| *        the diagonal elements of A are not referenced and are assumed | | | |
| * to be unity. | | | |
| * lda leading dimension of A. When side = 'L' or 'l', it must be at | | | |
| * least max(1,m) and at least max(1,n) otherwise | | | |
| * B double precision complex array of dimensions (ldb, n). On entry, | | | |
| the | | | |
| * leading m x n part of the array contains the matrix B. It is | | | |
| * overwritten with the transformed matrix on exit. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * B updated according to B = alpha * op(A) * B or B = alpha * B * op | | | |
| (A) | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztrmm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtrmm (char side, char uplo, char transa, | | | |
| char diag, int m, int n, cuDoubleComplex alpha, | | char diag, int m, int n, cuDoubleComplex alpha, | |
| const cuDoubleComplex *A, int lda, cuDoubleComp
lex *B, | | const cuDoubleComplex *A, int lda, cuDoubleComp
lex *B, | |
|
| int ldb); | | int ldb) | |
| | | { | |
| /* | | cublasHandle_t handle = cublasGetCurrentCtx(); | |
| * cublasZgeru (int m, int n, cuDoubleComplex alpha, const cuDoubleComplex | | cublasStatus_t error = cublasZtrmm_v2(handle, convertToSideMode(side),c | |
| *x, int incx, | | onvertToFillMode(uplo), | |
| * const cuDoubleComplex *y, int incy, cuDoubleComplex *A, int | | convertToOp(transa), convertToDiagType(d | |
| lda) | | iag), m, n, &alpha, A, lda, B, ldb, B, ldb ); | |
| * | | cublasSetError(error); | |
| * performs the rank 1 operation | | | |
| * | | | |
| * A = alpha * x * transpose(y) + A, | | | |
| * | | | |
| * where alpha is a double precision complex scalar, x is an m element doub | | | |
| le | | | |
| * precision complex vector, y is an n element double precision complex vec | | | |
| tor, and A | | | |
| * is an m by n matrix consisting of double precision complex elements. Mat | | | |
| rix A | | | |
| * is stored in column major format, and lda is the leading dimension of | | | |
| * the two-dimensional array used to store A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * m specifies the number of rows of the matrix A. It must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. It must be at | | | |
| * least zero. | | | |
| * alpha double precision complex scalar multiplier applied to x * transpo | | | |
| se(y) | | | |
| * x double precision complex array of length at least (1 + (m - 1) * | | | |
| abs(incx)) | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * y double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)) | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * A double precision complex array of dimensions (lda, n). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * transpose(y) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zgeru.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m < 0, n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZgeru (int m, int n, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *x, int incx, const cuDou | | | |
| bleComplex *y, | | | |
| int incy, cuDoubleComplex *A, int lda); | | | |
| | | | |
| /* | | | |
| * cublasZgerc (int m, int n, cuDoubleComplex alpha, const cuDoubleComplex | | | |
| *x, int incx, | | | |
| * const cuDoubleComplex *y, int incy, cuDoubleComplex *A, int | | | |
| lda) | | | |
| * | | | |
| * performs the rank 1 operation | | | |
| * | | | |
| * A = alpha * x * conjugate(transpose(y)) + A, | | | |
| * | | | |
| * where alpha is a double precision complex scalar, x is an m element doub | | | |
| le | | | |
| * precision complex vector, y is an n element double precision complex vec | | | |
| tor, and A | | | |
| * is an m by n matrix consisting of double precision complex elements. Mat | | | |
| rix A | | | |
| * is stored in column major format, and lda is the leading dimension of | | | |
| * the two-dimensional array used to store A. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * m specifies the number of rows of the matrix A. It must be at least | | | |
| * zero. | | | |
| * n specifies the number of columns of the matrix A. It must be at | | | |
| * least zero. | | | |
| * alpha double precision complex scalar multiplier applied to x * conjuga | | | |
| te(transpose(y)) | | | |
| * x double precision array of length at least (1 + (m - 1) * abs(incx | | | |
| )) | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * y double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)) | | | |
| * incy specifies the storage spacing between elements of y. incy must no | | | |
| t | | | |
| * be zero. | | | |
| * A double precision complex array of dimensions (lda, n). | | | |
| * lda leading dimension of two-dimensional array used to store matrix A | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * A updated according to A = alpha * x * conjugate(transpose(y)) + A | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zgerc.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m < 0, n < 0, incx == 0, incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZgerc (int m, int n, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *x, int incx, const cuDou | | | |
| bleComplex *y, | | | |
| int incy, cuDoubleComplex *A, int lda); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZherk (char uplo, char trans, int n, int k, double alpha, | | | |
| * const cuDoubleComplex *A, int lda, double beta, cuDoubleCom | | | |
| plex *C, int ldc) | | | |
| * | | | |
| * performs one of the hermitian rank k operations | | | |
| * | | | |
| * C = alpha * A * conjugate(transpose(A)) + beta * C, or | | | |
| * C = alpha * conjugate(transpose(A)) * A + beta * C. | | | |
| * | | | |
| * Alpha and beta are double precision scalars. C is an n x n hermitian mat | | | |
| rix | | | |
| * consisting of double precision complex elements and stored in either low | | | |
| er or | | | |
| * upper storage mode. A is a matrix consisting of double precision complex | | | |
| elements | | | |
| * with dimension of n x k in the first case, and k x n in the second case. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the hermitian matrix C is stored in upper or lo | | | |
| wer | | | |
| * storage mode as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the hermitian matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the hermitian matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * trans specifies the operation to be performed. If trans == 'N' or 'n', | | | |
| C = | | | |
| * alpha * A * conjugate(transpose(A)) + beta * C. If trans == 'T', | | | |
| 't', 'C', or 'c', | | | |
| * C = alpha * conjugate(transpose(A)) * A + beta * C. | | | |
| * n      specifies the number of rows and the number of columns of matrix C. | | | |
| If | | | |
| * trans == 'N' or 'n', n specifies the number of rows of matrix A. | | | |
| If | | | |
| * trans == 'T', 't', 'C', or 'c', n specifies the columns of matrix | | | |
| A. | | | |
| * n must be at least zero. | | | |
| * k If trans == 'N' or 'n', k specifies the number of columns of matr | | | |
| ix A. | | | |
| * If trans == 'T', 't', 'C', or 'c', k specifies the number of rows | | | |
| of | | | |
| * matrix A. k must be at least zero. | | | |
| * alpha double precision scalar multiplier applied to A * conjugate(trans | | | |
| pose(A)) or | | | |
| * conjugate(transpose(A)) * A. | | | |
| * A double precision complex array of dimensions (lda, ka), where ka | | | |
| is k when | | | |
| * trans == 'N' or 'n', and is n otherwise. When trans == 'N' or 'n' | | | |
| , | | | |
| * the leading n x k part of array A must contain the matrix A, | | | |
| * otherwise the leading k x n part of the array must contains the | | | |
| * matrix A. | | | |
| * lda leading dimension of A. When trans == 'N' or 'n' then lda must be | | | |
| at | | | |
| * least max(1, n). Otherwise lda must be at least max(1, k). | | | |
| * beta double precision scalar multiplier applied to C. If beta is zero, | | | |
| C | | | |
| * does not have to be a valid input | | | |
| * C double precision complex array of dimensions (ldc, n). If uplo = | | | |
| 'U' or 'u', | | | |
| * the leading n x n triangular part of the array C must contain the | | | |
| * upper triangular part of the hermitian matrix C and the strictly | | | |
| * lower triangular part of C is not referenced. On exit, the upper | | | |
| * triangular part of C is overwritten by the upper triangular part | | | |
| of | | | |
| * the updated matrix. If uplo = 'L' or 'l', the leading n x n | | | |
| * triangular part of the array C must contain the lower triangular | | | |
| part | | | |
| * of the hermitian matrix C and the strictly upper triangular part | | | |
| of C | | | |
| * is not referenced. On exit, the lower triangular part of C is | | | |
| * overwritten by the lower triangular part of the updated matrix. | | | |
| * The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero, and on exit they | | | |
| * are set to zero. | | | |
| * ldc leading dimension of C. It must be at least max(1, n). | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * conjugate(transpose(A)) + be | | | |
| ta * C, or C = | | | |
| * alpha * conjugate(transpose(A)) * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zherk.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if n < 0 or k < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZherk (char uplo, char trans, int n, int k, | | | |
| double alpha, | | | |
| const cuDoubleComplex *A, int lda, | | | |
| double beta, | | | |
| cuDoubleComplex *C, int ldc); | | | |
| | | | |
| /* | | | |
| * void | | | |
| * cublasZhemm (char side, char uplo, int m, int n, cuDoubleComplex alpha, | | | |
| * const cuDoubleComplex *A, int lda, const cuDoubleComplex *B | | | |
| , int ldb, | | | |
| * cuDoubleComplex beta, cuDoubleComplex *C, int ldc); | | | |
| * | | | |
| * performs one of the matrix-matrix operations | | | |
| * | | | |
| * C = alpha * A * B + beta * C, or | | | |
| * C = alpha * B * A + beta * C, | | | |
| * | | | |
| * where alpha and beta are double precision complex scalars, A is a hermit | | | |
| ian matrix | | | |
| * consisting of double precision complex elements and stored in either low | | | |
| er or upper | | | |
| * storage mode, and B and C are m x n matrices consisting of double precis | | | |
| ion | | | |
| * complex elements. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * side specifies whether the hermitian matrix A appears on the left side | | | |
| * hand side or right hand side of matrix B, as follows. If side == | | | |
| 'L' | | | |
| * or 'l', then C = alpha * A * B + beta * C. If side = 'R' or 'r', | | | |
| * then C = alpha * B * A + beta * C. | | | |
| * uplo specifies whether the hermitian matrix A is stored in upper or lo | | | |
| wer | | | |
| * storage mode, as follows. If uplo == 'U' or 'u', only the upper | | | |
| * triangular part of the hermitian matrix is to be referenced, and | | | |
| the | | | |
| * elements of the strictly lower triangular part are to be infered | | | |
| from | | | |
| * those in the upper triangular part. If uplo == 'L' or 'l', only t | | | |
| he | | | |
| * lower triangular part of the hermitian matrix is to be referenced | | | |
| , | | | |
| * and the elements of the strictly upper triangular part are to be | | | |
| * infered from those in the lower triangular part. | | | |
| * m specifies the number of rows of the matrix C, and the number of r | | | |
| ows | | | |
| * of matrix B. It also specifies the dimensions of hermitian matrix | | | |
| A | | | |
| * when side == 'L' or 'l'. m must be at least zero. | | | |
| * n specifies the number of columns of the matrix C, and the number o | | | |
| f | | | |
| * columns of matrix B. It also specifies the dimensions of hermitia | | | |
| n | | | |
| * matrix A when side == 'R' or 'r'. n must be at least zero. | | | |
| * alpha double precision scalar multiplier applied to A * B, or B * A | | | |
| * A double precision complex array of dimensions (lda, ka), where ka | | | |
| is m when | | | |
| * side == 'L' or 'l' and is n otherwise. If side == 'L' or 'l' the | | | |
| * leading m x m part of array A must contain the hermitian matrix, | | | |
| * such that when uplo == 'U' or 'u', the leading m x m part stores | | | |
| the | | | |
| * upper triangular part of the hermitian matrix, and the strictly l | | | |
| ower | | | |
| * triangular part of A is not referenced, and when uplo == 'U' or ' | | | |
| u', | | | |
| * the leading m x m part stores the lower triangular part of the | | | |
| * hermitian matrix and the strictly upper triangular part is not | | | |
| * referenced. If side == 'R' or 'r' the leading n x n part of array | | | |
| A | | | |
| * must contain the hermitian matrix, such that when uplo == 'U' or | | | |
| 'u', | | | |
| * the leading n x n part stores the upper triangular part of the | | | |
| * hermitian matrix and the strictly lower triangular part of A is n | | | |
| ot | | | |
| * referenced, and when uplo == 'U' or 'u', the leading n x n part | | | |
| * stores the lower triangular part of the hermitian matrix and the | | | |
| * strictly upper triangular part is not referenced. The imaginary p | | | |
| arts | | | |
| * of the diagonal elements need not be set, they are assumed to be | | | |
| zero. | | | |
| * | | | |
| * lda leading dimension of A. When side == 'L' or 'l', it must be at le | | | |
| ast | | | |
| * max(1, m) and at least max(1, n) otherwise. | | | |
| * B double precision complex array of dimensions (ldb, n). On entry, | | | |
| the leading | | | |
| * m x n part of the array contains the matrix B. | | | |
| * ldb leading dimension of B. It must be at least max (1, m). | | | |
| * beta double precision complex scalar multiplier applied to C. If beta | | | |
| is zero, C | | | |
| * does not have to be a valid input | | | |
| * C double precision complex array of dimensions (ldc, n) | | | |
| * ldc leading dimension of C. Must be at least max(1, m) | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * C updated according to C = alpha * A * B + beta * C, or C = alpha * | | | |
| * B * A + beta * C | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zhemm.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if m or n are < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZhemm (char side, char uplo, int m, int n, | | | |
| cuDoubleComplex alpha, const cuDoubleComplex *A | | | |
| , int lda, | | | |
| const cuDoubleComplex *B, int ldb, cuDoubleComp | | | |
| lex beta, | | | |
| cuDoubleComplex *C, int ldc); | | | |
| | | | |
|
| /* | | } | |
| * void | | | |
| * cublasZtrsv (char uplo, char trans, char diag, int n, const cuDoubleComp | | | |
| lex *A, | | | |
| * int lda, cuDoubleComplex *x, int incx) | | | |
| * | | | |
| * solves a system of equations op(A) * x = b, where op(A) is either A, | | | |
| * transpose(A) or conjugate(transpose(A)). b and x are double precision | | | |
| * complex vectors consisting of n elements, and A is an n x n matrix | | | |
| * composed of a unit or non-unit, upper or lower triangular matrix. | | | |
| * Matrix A is stored in column major format, and lda is the leading | | | |
| * dimension of the two-dimensional array containing A. | | | |
| * | | | |
| * No test for singularity or near-singularity is included in this function | | | |
| . | | | |
| * Such tests must be performed before calling this function. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the matrix data is stored in the upper or the | | | |
| * lower triangular part of array A. If uplo = 'U' or 'u', then only | | | |
| * the upper triangular part of A may be referenced. If uplo = 'L' o | | | |
| r | | | |
| * 'l', then only the lower triangular part of A may be referenced. | | | |
| * trans specifies op(A). If transa = 'n' or 'N', op(A) = A. If transa = ' | | | |
| t', | | | |
| * 'T', 'c', or 'C', op(A) = transpose(A) | | | |
| * diag specifies whether or not A is a unit triangular matrix like so: | | | |
| * if diag = 'U' or 'u', A is assumed to be unit triangular. If | | | |
| * diag = 'N' or 'n', then A is not assumed to be unit triangular. | | | |
| * n specifies the number of rows and columns of the matrix A. It | | | |
| * must be at least 0. | | | |
| * A is a double precision complex array of dimensions (lda, n). If up | | | |
| lo = 'U' | | | |
| * or 'u', then A must contains the upper triangular part of a symme | | | |
| tric | | | |
| * matrix, and the strictly lower triangular parts is not referenced | | | |
| . | | | |
| * If uplo = 'L' or 'l', then A contains the lower triangular part o | | | |
| f | | | |
| * a symmetric matrix, and the strictly upper triangular part is not | | | |
| * referenced. | | | |
| * lda is the leading dimension of the two-dimensional array containing | | | |
| A. | | | |
| * lda must be at least max(1, n). | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * On entry, x contains the n element right-hand side vector b. On e | | | |
| xit, | | | |
| * it is overwritten with the solution vector x. | | | |
| * incx specifies the storage spacing between elements of x. incx must no | | | |
| t | | | |
| * be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * x updated to contain the solution vector x that solves op(A) * x = | | | |
| b. | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/ztrsv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if incx == 0 or if n < 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZtrsv (char uplo, char trans, char diag, int n, | | | |
| const cuDoubleComplex *A, int lda, cuDoubleComp | | | |
| lex *x, | | | |
| int incx); | | | |
| | | | |
|
| /* | | | |
| * void | | | |
| * cublasZhbmv (char uplo, int n, int k, cuDoubleComplex alpha, const cuDou | | | |
| bleComplex *A, int lda, | | | |
| * const cuDoubleComplex *x, int incx, cuDoubleComplex beta, c | | | |
| uDoubleComplex *y, int incy) | | | |
| * | | | |
| * performs the matrix-vector operation | | | |
| * | | | |
| * y := alpha*A*x + beta*y | | | |
| * | | | |
| * alpha and beta are double precision complex scalars. x and y are double | | | |
| precision | | | |
| * complex vectors with n elements. A is an n by n hermitian band matrix co | | | |
| nsisting | | | |
| * of double precision complex elements, with k super-diagonals and the sam | | | |
| e number | | | |
| * of subdiagonals. | | | |
| * | | | |
| * Input | | | |
| * ----- | | | |
| * uplo specifies whether the upper or lower triangular part of the hermi | | | |
| tian | | | |
| * band matrix A is being supplied. If uplo == 'U' or 'u', the upper | | | |
| * triangular part is being supplied. If uplo == 'L' or 'l', the low | | | |
| er | | | |
| * triangular part is being supplied. | | | |
| * n specifies the number of rows and the number of columns of the | | | |
| * hermitian matrix A. n must be at least zero. | | | |
| * k specifies the number of super-diagonals of matrix A. Since the ma | | | |
| trix | | | |
| * is hermitian, this is also the number of sub-diagonals. k must be | | | |
| at | | | |
| * least zero. | | | |
| * alpha double precision complex scalar multiplier applied to A*x. | | | |
| * A double precision complex array of dimensions (lda, n). When uplo | | | |
| == 'U' or | | | |
| * 'u', the leading (k + 1) x n part of array A must contain the upp | | | |
| er | | | |
| * triangular band of the hermitian matrix, supplied column by colum | | | |
| n, | | | |
| * with the leading diagonal of the matrix in row (k+1) of the array | | | |
| , | | | |
| * the first super-diagonal starting at position 2 in row k, and so | | | |
| on. | | | |
| * The top left k x k triangle of the array A is not referenced. Whe | | | |
| n | | | |
| * uplo == 'L' or 'l', the leading (k + 1) x n part of the array A m | | | |
| ust | | | |
| * contain the lower triangular band part of the hermitian matrix, | | | |
| * supplied column by column, with the leading diagonal of the matri | | | |
| x in | | | |
| * row 1 of the array, the first sub-diagonal starting at position 1 | | | |
| in | | | |
| * row 2, and so on. The bottom right k x k triangle of the array A | | | |
| is | | | |
| * not referenced. The imaginary parts of the diagonal elements need | | | |
| * not be set, they are assumed to be zero. | | | |
| * lda leading dimension of A. lda must be at least (k + 1). | | | |
| * x double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incx)). | | | |
| * incx storage spacing between elements of x. incx must not be zero. | | | |
| * beta double precision complex scalar multiplier applied to vector y. I | | | |
| f beta is | | | |
| * zero, y is not read. | | | |
| * y double precision complex array of length at least (1 + (n - 1) * | | | |
| abs(incy)). | | | |
| * If beta is zero, y is not read. | | | |
| * incy storage spacing between elements of y. incy must not be zero. | | | |
| * | | | |
| * Output | | | |
| * ------ | | | |
| * y updated according to alpha*A*x + beta*y | | | |
| * | | | |
| * Reference: http://www.netlib.org/blas/zhbmv.f | | | |
| * | | | |
| * Error status for this function can be retrieved via cublasGetError(). | | | |
| * | | | |
| * Error Status | | | |
| * ------------ | | | |
| * CUBLAS_STATUS_NOT_INITIALIZED if CUBLAS library has not been initialize | | | |
| d | | | |
| * CUBLAS_STATUS_INVALID_VALUE if k or n < 0, or if incx or incy == 0 | | | |
| * CUBLAS_STATUS_ARCH_MISMATCH if invoked on device without DP support | | | |
| * CUBLAS_STATUS_EXECUTION_FAILED if function failed to launch on GPU | | | |
| */ | | | |
| void CUBLASAPI cublasZhbmv (char uplo, int n, int k, cuDoubleComplex alpha, | | | |
| const cuDoubleComplex *A, int lda, const cuDoub | | | |
| leComplex *x, | | | |
| int incx, cuDoubleComplex beta, cuDoubleComplex | | | |
| *y, int incy); | | | |
| #if defined(__cplusplus) | | #if defined(__cplusplus) | |
| } | | } | |
| #endif /* __cplusplus */ | | #endif /* __cplusplus */ | |
| | | | |
| #endif /* !defined(CUBLAS_H_) */ | | #endif /* !defined(CUBLAS_H_) */ | |
| | | | |
End of changes. 95 change blocks. |
| 10240 lines changed or deleted | | 1844 lines changed or added | |
|
| curand_precalc.h | | curand_precalc.h | |
| | | | |
| skipping to change at line 17 | | skipping to change at line 17 | |
| #define XORWOW_SEQUENCE_SPACING (67) | | #define XORWOW_SEQUENCE_SPACING (67) | |
| | | | |
| static __constant__ unsigned int precalc_xorwow_matrix[8][800] = { | | static __constant__ unsigned int precalc_xorwow_matrix[8][800] = { | |
| {850664906UL, 2293210629UL, 1517805917UL, 1215500405UL, 1612415445UL, 64538
8200UL, 824349799UL, 3517232886UL, 4075591755UL, 3089899292UL, 4249786064UL
, 3811424903UL, 1100783479UL, 53649761UL, 2817264826UL, 3159462529UL, 16548
48550UL, 950025444UL, 3095510002UL, 4080567211UL, 4111078399UL, 3241719305U
L, 2788212779UL, 4256963770UL, 2426893717UL, 4190211142UL, 1420776905UL, 37
80537969UL, 1102912875UL, 1657948873UL, 3354905256UL, 2519610308UL, 5157776
63UL, 3396785394UL, 1832603711UL, 1154211550UL, 1915690212UL, 1933919046UL,
789578337UL, 337961173UL, 1359089498UL, 2249086205UL, 3417955173UL, 862571
348UL, 528120760UL, 1265685672UL, 1970052076UL, 3585976752UL, 3645339918UL,
312171257UL, 1360991400UL, 1994321680UL, 2327168468UL, 2540437053UL, 11804
83641UL, 2217962701UL, 182726833UL, 590204372UL, 1904496495UL, 2545607041UL
, 3697978033UL, 1084030545UL, 3397906968UL, 2192325323UL, 2704204176UL, 106
9092002UL, 2364406907UL, 1578647245UL, 3561974633UL, 3437665426UL, 14641273
05UL, 1616628807UL, 2243114101UL, 3639967880UL, 1702613633UL, 2437350057UL,
39991274UL, 2024323584UL, 3795072940UL, 3604530798UL, 443099203UL, 6435362
12UL, 1919517328UL, 3931285769UL, 427935569UL, 276421624UL, 2492081750UL, 2
62729512UL, 3088549877UL, 2922650665UL, 1816283755UL, 4246096489UL, 8425759
14UL, 1460435650UL, 3050522190UL, 2640849794UL, 3697925816UL, 3465779075UL,
3856929655UL, 1365559780UL, 2897029415UL, 2747033756UL, 3611830629UL, 1891
542518UL, 1897590206UL, 437451803UL, 677924906UL, 123809117UL, 3940574372UL
, 687640291UL, 3488484529UL, 470218446UL, 1092571016UL, 1537938503UL, 10733
23937UL, 611300083UL, 3809285994UL, 3975678726UL, 925845389UL, 2514775760UL
, 2859302390UL, 2761919483UL, 993285307UL, 164095287UL, 3736193671UL, 20789
46336UL, 1418537059UL, 1202525920UL, 4234029440UL, 1313593624UL, 2484428922
UL, 1833969372UL, 661495122UL, 2217907395UL, 2795045321UL, 2950835531UL, 14
02379354UL, 351314168UL, 1902476749UL, 1914974334UL, 2873973176UL, 13212036
03UL, 3316118265UL, 3282193947UL, 1342191737UL, 793441242UL, 3281524559UL,
296088733UL, 487851702UL, 712098215UL, 1388727135UL, 1705533557UL, 35578002
92UL, 399729516UL, 1355829467UL, 291276309UL, 421164833UL, 1318404599UL, 20
64519128UL, 1161612642UL, 2076623594UL, 850664906UL, 2293210629UL, 15178059
17UL, 1215500405UL, 3847487204UL, 645388200UL, 824349799UL, 3517232886UL, 4
075591755UL, 2755872609UL, 4249786064UL, 3811424903UL, 1100783479UL, 536497
61UL, 1417544262UL, 3159462529UL, 1654848550UL, 950025444UL, 3095510002UL,
1908900347UL, 4111078399UL, 3241719305UL, 2788212779UL, 4256963770UL, 37502
58343UL, 4190211142UL, 1420776905UL, 3780537969UL, 1102912875UL, 1690550UL,
3354905256UL, 2519610308UL, 515777663UL, 3396785394UL, 2658162202UL, 11542
11550UL, 1915690212UL, 1933919046UL, 789578337UL, 189880016UL, 1359089498UL
, 2249086205UL, 3417955173UL, 862571348UL, 998719835UL, 1265685672UL, 19700
52076UL, 3585976752UL, 3645339918UL, 2973042959UL, 1360991400UL, 1994321680
UL, 2327168468UL, 2540437053UL, 2283905032UL, 2217962701UL, 182726833UL, 59
0204372UL, 1904496495UL, 110719262UL, 3697978033UL, 1084030545UL, 339790696
8UL, 2192325323UL, 4133333579UL, 1069092002UL, 2364406907UL, 1578647245UL,
3561974633UL, 3629845331UL, 1464127305UL, 1616628807UL, 2243114101UL, 36399
67880UL, 3256744141UL, 2437350057UL, 39991274UL, 2024323584UL, 3795072940UL
, 1024703328UL, 443099203UL, 643536212UL, 1919517328UL, 3931285769UL, 27551
67056UL, 276421624UL, 2492081750UL, 262729512UL, 3088549877UL, 2817867653UL
, 1816283755UL, 4246096489UL, 842575914UL, 1460435650UL, 2276077438UL, 2640
849794UL, 3697925816UL, 3465779075UL, 3856929655UL, 130551477UL, 2897029415
UL, 2747033756UL, 3611830629UL, 1891542518UL, 804565809UL, 437451803UL, 677
924906UL, 123809117UL, 3940574372UL, 2446610749UL, 3488484529UL, 470218446U
L, 1092571016UL, 1537938503UL, 1502147484UL, 611300083UL, 3809285994UL, 397
5678726UL, 925845389UL, 872826112UL, 2859302390UL, 2761919483UL, 993285307U
L, 164095287UL, 3901654538UL, 2078946336UL, 1418537059UL, 1202525920UL, 423
4029440UL, 704759480UL, 2484428922UL, 1833969372UL, 661495122UL, 2217907395
UL, 3287413716UL, 2950835531UL, 1402379354UL, 351314168UL, 1902476749UL, 20
33316109UL, 2873973176UL, 1321203603UL, 3316118265UL, 3282193947UL, 1316780
684UL, 793441242UL, 3281524559UL, 296088733UL, 487851702UL, 314311643UL, 13
88727135UL, 1705533557UL, 3557800292UL, 399729516UL, 1660074989UL, 29127630
9UL, 421164833UL, 1318404599UL, 2064519128UL, 3156334112UL, 2076623594UL, 8
50664906UL, 2293210629UL, 1517805917UL, 335452425UL, 3847487204UL, 64538820
0UL, 824349799UL, 3517232886UL, 954487767UL, 2755872609UL, 4249786064UL, 38
11424903UL, 1100783479UL, 3408594583UL, 1417544262UL, 3159462529UL, 1654848
550UL, 950025444UL, 324339737UL, 1908900347UL, 4111078399UL, 3241719305UL,
2788212779UL, 1890540205UL, 3750258343UL, 4190211142UL, 1420776905UL, 37805
37969UL, 3716648585UL, 1690550UL, 3354905256UL, 2519610308UL, 515777663UL,
3758156132UL, 2658162202UL, 1154211550UL, 1915690212UL, 1933919046UL, 84414
9171UL, 189880016UL, 1359089498UL, 2249086205UL, 3417955173UL, 1031812215UL
, 998719835UL, 1265685672UL, 1970052076UL, 3585976752UL, 3174204115UL, 2973
042959UL, 1360991400UL, 1994321680UL, 2327168468UL, 714016907UL, 2283905032
UL, 2217962701UL, 182726833UL, 590204372UL, 2151450260UL, 110719262UL, 3697
978033UL, 1084030545UL, 3397906968UL, 767772303UL, 4133333579UL, 1069092002
UL, 2364406907UL, 1578647245UL, 42955292UL, 3629845331UL, 1464127305UL, 161
6628807UL, 2243114101UL, 3222189776UL, 3256744141UL, 2437350057UL, 39991274
UL, 2024323584UL, 3142424684UL, 1024703328UL, 443099203UL, 643536212UL, 191
9517328UL, 918511196UL, 2755167056UL, 276421624UL, 2492081750UL, 262729512U
L, 4246877536UL, 2817867653UL, 1816283755UL, 4246096489UL, 842575914UL, 142
5765936UL, 2276077438UL, 2640849794UL, 3697925816UL, 3465779075UL, 14917025
26UL, 130551477UL, 2897029415UL, 2747033756UL, 3611830629UL, 1844578694UL,
804565809UL, 437451803UL, 677924906UL, 123809117UL, 3419189841UL, 244661074
9UL, 3488484529UL, 470218446UL, 1092571016UL, 3272535988UL, 1502147484UL, 6
11300083UL, 3809285994UL, 3975678726UL, 2853681168UL, 872826112UL, 28593023
90UL, 2761919483UL, 993285307UL, 1434560128UL, 3901654538UL, 2078946336UL,
1418537059UL, 1202525920UL, 2530097881UL, 704759480UL, 2484428922UL, 183396
9372UL, 661495122UL, 503878844UL, 3287413716UL, 2950835531UL, 1402379354UL,
351314168UL, 4131886119UL, 2033316109UL, 2873973176UL, 1321203603UL, 33161
18265UL, 237900321UL, 1316780684UL, 793441242UL, 3281524559UL, 296088733UL,
1730738847UL, 314311643UL, 1388727135UL, 1705533557UL, 3557800292UL, 15538
35665UL, 1660074989UL, 291276309UL, 421164833UL, 1318404599UL, 964731488UL,
3156334112UL, 2076623594UL, 850664906UL, 2293210629UL, 1105350579UL, 33545
2425UL, 3847487204UL, 645388200UL, 824349799UL, 2789953706UL, 954487767UL,
2755872609UL, 4249786064UL, 3811424903UL, 3937839949UL, 3408594583UL, 14175
44262UL, 3159462529UL, 1654848550UL, 624060530UL, 324339737UL, 1908900347UL
, 4111078399UL, 3241719305UL, 2294919498UL, 1890540205UL, 3750258343UL, 419
0211142UL, 1420776905UL, 2279133729UL, 3716648585UL, 1690550UL, 3354905256U
L, 2519610308UL, 3563975602UL, 3758156132UL, 2658162202UL, 1154211550UL, 19
15690212UL, 3505586122UL, 844149171UL, 189880016UL, 1359089498UL, 224908620
5UL, 2389487504UL, 1031812215UL, 998719835UL, 1265685672UL, 1970052076UL, 2
798611919UL, 3174204115UL, 2973042959UL, 1360991400UL, 1994321680UL, 168413
4678UL, 714016907UL, 2283905032UL, 2217962701UL, 182726833UL, 1734988742UL,
2151450260UL, 110719262UL, 3697978033UL, 1084030545UL, 159906818UL, 767772
303UL, 4133333579UL, 1069092002UL, 2364406907UL, 1290801202UL, 42955292UL,
3629845331UL, 1464127305UL, 1616628807UL, 987794861UL, 3222189776UL, 325674
4141UL, 2437350057UL, 39991274UL, 3644076751UL, 3142424684UL, 1024703328UL,
443099203UL, 643536212UL, 1487589384UL, 918511196UL, 2755167056UL, 2764216
24UL, 2492081750UL, 137688638UL, 4246877536UL, 2817867653UL, 1816283755UL,
4246096489UL, 1518475380UL, 1425765936UL, 2276077438UL, 2640849794UL, 36979
25816UL, 4226506771UL, 1491702526UL, 130551477UL, 2897029415UL, 2747033756U
L, 2033599579UL, 1844578694UL, 804565809UL, 437451803UL, 677924906UL, 27490
65512UL, 3419189841UL, 2446610749UL, 3488484529UL, 470218446UL, 290444026UL
, 3272535988UL, 1502147484UL, 611300083UL, 3809285994UL, 2546040767UL, 2853
681168UL, 872826112UL, 2859302390UL, 2761919483UL, 4097961150UL, 1434560128
UL, 3901654538UL, 2078946336UL, 1418537059UL, 2725734455UL, 2530097881UL, 7
04759480UL, 2484428922UL, 1833969372UL, 3999408333UL, 503878844UL, 32874137
16UL, 2950835531UL, 1402379354UL, 3861442503UL, 4131886119UL, 2033316109UL,
2873973176UL, 1321203603UL, 1267331405UL, 237900321UL, 1316780684UL, 79344
1242UL, 3281524559UL, 1273427916UL, 1730738847UL, 314311643UL, 1388727135UL
, 1705533557UL, 1474310231UL, 1553835665UL, 1660074989UL, 291276309UL, 4211
64833UL, 3884815658UL, 3088049345UL, 3307042227UL, 3228948601UL, 1717605083
UL, 1864502063UL, 3799516572UL, 2372822470UL, 2691586476UL, 1172840854UL, 1
577099080UL, 870101866UL, 2139291021UL, 406996656UL, 255568268UL, 897760202
UL, 674745664UL, 885214361UL, 3753233375UL, 3015215223UL, 1711461259UL, 324
1363282UL, 2125360928UL, 2493601640UL, 2350228245UL, 3434627328UL, 20956429
63UL, 3360932494UL, 3287396242UL, 4070512427UL, 3415702664UL, 1958354224UL,
3280206940UL, 3929504236UL, 3390499817UL, 4144225735UL, 3621750606UL, 3205
006592UL, 3495743785UL, 269239326UL, 2181299371UL, 2898796651UL, 2613623219
UL, 3988711298UL, 2162437858UL, 949553433UL, 3289670000UL, 3559525307UL, 33
66925567UL, 2112148665UL, 955626393UL, 1790865381UL, 699223558UL, 388958430
1UL, 1020750250UL, 4105283899UL, 2295851818UL, 4045668915UL, 2224770025UL,
766386910UL, 4265157386UL, 89139307UL, 2099710177UL, 1012450874UL, 18754924
46UL, 1927399417UL, 767450812UL, 654474783UL, 4265293038UL, 4041215389UL, 4
102336947UL, 4263617328UL, 2135826340UL, 2317231535UL, 3773895729UL, 403151
111UL, 1400693138UL, 4255050194UL, 755369466UL, 2325764302UL, 2617301159UL,
4165707294UL, 1206304709UL, 2415645397UL, 4276004841UL, 1457022279UL, 6626
60652UL, 795140282UL, 828519889UL, 805830562UL, 1179976369UL, 2212548232UL,
755708248UL, 1034682071UL, 899950902UL, 1906046264UL, 1861009040UL, 310711
525UL, 920739741UL, 2322414272UL, 3179236470UL, 81822135UL, 4111390320UL, 1
800166783UL, 112253014UL, 688771939UL, 1050990794UL, 3124647483UL, 28705217
1UL, 1363630156UL, 3447798279UL, 1405733552UL, 3075862538UL, 1682808202UL,
1595154222UL, 1173705692UL, 680713285UL, 2748212230UL, 568610527UL, 3434965
538UL, 1114942930UL, 2835858745UL, 2575992250UL, 3243355150UL, 2127580225UL
, 1855934450UL, 3915941751UL, 2228679809UL, 1514780124UL, 1506688039UL, 103
3083295UL, 793807083UL, 1120681149UL, 4105670165UL, 3999570340UL, 208302013
1UL, 1213356023UL, 3684882757UL, 3375797774UL, 3577986103UL, 2092046164UL,
2593847443UL, 1826450612UL, 367828409UL, 3198272513UL, 1941316667UL, 943707
510UL, 907134807UL, 2020457947UL, 1462193665UL, 2964617539UL, 4216491663UL,
2625270800UL, 2395371467UL, 3691003028UL, 3659016793UL, 2381847054UL, 3513
105567UL, 3013019506UL, 2731245927UL, }, {1680024716UL, 2112340059UL, 33874
75367UL, 2080916186UL, 1431532386UL, 3907378472UL, 2636491350UL, 2176128529
UL, 2236616671UL, 3736851460UL, 2604001339UL, 3893075234UL, 3495918635UL, 4
116370522UL, 1384310379UL, 3660102574UL, 2030233939UL, 2759207091UL, 493479
23UL, 97526506UL, 2566932710UL, 1566181275UL, 3127827248UL, 578401670UL, 14
99229308UL, 2581732444UL, 279715551UL, 809690877UL, 1438444015UL, 878935323
UL, 1495277039UL, 3417305339UL, 2858903785UL, 3074075088UL, 603749086UL, 23
70669734UL, 391683868UL, 3933465331UL, 2884128106UL, 1478317876UL, 18649883
35UL, 2925823809UL, 4133578805UL, 218104493UL, 368652174UL, 1998600344UL, 1
109346044UL, 1716435313UL, 415435111UL, 91393686UL, 2536620737UL, 144006857
3UL, 481874870UL, 142128108UL, 988825519UL, 2077118779UL, 2858045339UL, 406
8162251UL, 115593872UL, 1364244587UL, 3550167006UL, 3728768059UL, 177242368
5UL, 2504624145UL, 248732306UL, 1412607307UL, 4081166331UL, 154438218UL, 16
52901877UL, 3932533490UL, 3142799969UL, 3154073676UL, 3112018078UL, 2757873
595UL, 2364830126UL, 2855791484UL, 793851407UL, 507785167UL, 263713916UL, 4
060700051UL, 3291978358UL, 1584226715UL, 2546417990UL, 450747961UL, 2951067
700UL, 2706009093UL, 1788578194UL, 4030171132UL, 2610979903UL, 573420740UL,
4269115622UL, 2180305819UL, 2646894726UL, 716649335UL, 3875715683UL, 85342
8184UL, 2436760738UL, 4190071217UL, 2754423535UL, 540698101UL, 4082489821UL
, 741976046UL, 267559495UL, 1591532642UL, 2500610323UL, 3203248679UL, 14731
2102UL, 2772368222UL, 1412987047UL, 2295185573UL, 1932341300UL, 898396308UL
, 1837129999UL, 3113914292UL, 2613354524UL, 3141601915UL, 276087167UL, 1887
389351UL, 757801450UL, 3752353732UL, 2745818074UL, 1442953464UL, 3802648347
UL, 223728071UL, 2169947402UL, 1338125300UL, 3642174036UL, 2794462634UL, 23
26349851UL, 862746036UL, 3577092599UL, 627103363UL, 552173564UL, 4142604459
UL, 2310329406UL, 583522272UL, 189323282UL, 1217612313UL, 73550248UL, 24346
92829UL, 2757269706UL, 2392210091UL, 3032922600UL, 3573904125UL, 2897178037
UL, 2632631469UL, 3085332665UL, 3775619904UL, 2563291734UL, 1351375865UL, 4
043427793UL, 1803743084UL, 3112116579UL, 522940594UL, 2690374983UL, 2613871
529UL, 3810037031UL, 1765642390UL, 534554747UL, 1930852049UL, 2264349344UL,
1680024716UL, 2112340059UL, 3387475367UL, 2080916186UL, 75966494UL, 390737
8472UL, 2636491350UL, 2176128529UL, 2236616671UL, 2372987046UL, 2604001339U
L, 3893075234UL, 3495918635UL, 4116370522UL, 534929913UL, 3660102574UL, 203
0233939UL, 2759207091UL, 49347923UL, 987575186UL, 2566932710UL, 1566181275U
L, 3127827248UL, 578401670UL, 3731513754UL, 2581732444UL, 279715551UL, 8096
90877UL, 1438444015UL, 2185866850UL, 1495277039UL, 3417305339UL, 2858903785
UL, 3074075088UL, 4198538376UL, 2370669734UL, 391683868UL, 3933465331UL, 28
84128106UL, 1400216510UL, 1864988335UL, 2925823809UL, 4133578805UL, 2181044
93UL, 2798390374UL, 1998600344UL, 1109346044UL, 1716435313UL, 415435111UL,
1892535124UL, 2536620737UL, 1440068573UL, 481874870UL, 142128108UL, 3290827
40UL, 2077118779UL, 2858045339UL, 4068162251UL, 115593872UL, 2644000449UL,
3550167006UL, 3728768059UL, 1772423685UL, 2504624145UL, 2140118619UL, 14126
07307UL, 4081166331UL, 154438218UL, 1652901877UL, 3804911318UL, 3142799969U
L, 3154073676UL, 3112018078UL, 2757873595UL, 50297646UL, 2855791484UL, 7938
51407UL, 507785167UL, 263713916UL, 3324588195UL, 3291978358UL, 1584226715UL
, 2546417990UL, 450747961UL, 3455625012UL, 2706009093UL, 1788578194UL, 4030
171132UL, 2610979903UL, 3835380965UL, 4269115622UL, 2180305819UL, 264689472
6UL, 716649335UL, 2607142354UL, 853428184UL, 2436760738UL, 4190071217UL, 27
54423535UL, 456808691UL, 4082489821UL, 741976046UL, 267559495UL, 1591532642
UL, 2722205042UL, 3203248679UL, 147312102UL, 2772368222UL, 1412987047UL, 19
50543946UL, 1932341300UL, 898396308UL, 1837129999UL, 3113914292UL, 42861639
2UL, 3141601915UL, 276087167UL, 1887389351UL, 757801450UL, 963534966UL, 274
5818074UL, 1442953464UL, 3802648347UL, 223728071UL, 229039300UL, 1338125300
UL, 3642174036UL, 2794462634UL, 2326349851UL, 206115203UL, 3577092599UL, 62
7103363UL, 552173564UL, 4142604459UL, 1492461846UL, 583522272UL, 189323282U
L, 1217612313UL, 73550248UL, 3552211807UL, 2757269706UL, 2392210091UL, 3032
922600UL, 3573904125UL, 810640644UL, 2632631469UL, 3085332665UL, 3775619904
UL, 2563291734UL, 922608790UL, 4043427793UL, 1803743084UL, 3112116579UL, 52
2940594UL, 1785093944UL, 2613871529UL, 3810037031UL, 1765642390UL, 53455474
7UL, 3528050076UL, 2264349344UL, 1680024716UL, 2112340059UL, 3387475367UL,
3295682653UL, 75966494UL, 3907378472UL, 2636491350UL, 2176128529UL, 3574915
532UL, 2372987046UL, 2604001339UL, 3893075234UL, 3495918635UL, 1280296085UL
, 534929913UL, 3660102574UL, 2030233939UL, 2759207091UL, 299776535UL, 98757
5186UL, 2566932710UL, 1566181275UL, 3127827248UL, 3874691533UL, 3731513754U
L, 2581732444UL, 279715551UL, 809690877UL, 3100791084UL, 2185866850UL, 1495
277039UL, 3417305339UL, 2858903785UL, 1310351481UL, 4198538376UL, 237066973
4UL, 391683868UL, 3933465331UL, 2749085130UL, 1400216510UL, 1864988335UL, 2
925823809UL, 4133578805UL, 3352814594UL, 2798390374UL, 1998600344UL, 110934
6044UL, 1716435313UL, 1571752941UL, 1892535124UL, 2536620737UL, 1440068573U
L, 481874870UL, 2485033697UL, 329082740UL, 2077118779UL, 2858045339UL, 4068
162251UL, 3837440666UL, 2644000449UL, 3550167006UL, 3728768059UL, 177242368
5UL, 1176559812UL, 2140118619UL, 1412607307UL, 4081166331UL, 154438218UL, 2
902622972UL, 3804911318UL, 3142799969UL, 3154073676UL, 3112018078UL, 240339
1233UL, 50297646UL, 2855791484UL, 793851407UL, 507785167UL, 2351826747UL, 3
324588195UL, 3291978358UL, 1584226715UL, 2546417990UL, 746876926UL, 3455625
012UL, 2706009093UL, 1788578194UL, 4030171132UL, 3779307353UL, 3835380965UL
, 4269115622UL, 2180305819UL, 2646894726UL, 2602235234UL, 2607142354UL, 853
428184UL, 2436760738UL, 4190071217UL, 2066757692UL, 456808691UL, 4082489821
UL, 741976046UL, 267559495UL, 3001080633UL, 2722205042UL, 3203248679UL, 147
312102UL, 2772368222UL, 89950260UL, 1950543946UL, 1932341300UL, 898396308UL
, 1837129999UL, 947911286UL, 428616392UL, 3141601915UL, 276087167UL, 188738
9351UL, 2583987247UL, 963534966UL, 2745818074UL, 1442953464UL, 3802648347UL
, 4229124441UL, 229039300UL, 1338125300UL, 3642174036UL, 2794462634UL, 2472
155633UL, 206115203UL, 3577092599UL, 627103363UL, 552173564UL, 2586882739UL
, 1492461846UL, 583522272UL, 189323282UL, 1217612313UL, 3501549884UL, 35522
11807UL, 2757269706UL, 2392210091UL, 3032922600UL, 740675778UL, 810640644UL
, 2632631469UL, 3085332665UL, 3775619904UL, 3643289881UL, 922608790UL, 4043
427793UL, 1803743084UL, 3112116579UL, 2213337398UL, 1785093944UL, 261387152
9UL, 3810037031UL, 1765642390UL, 762472016UL, 3528050076UL, 2264349344UL, 1
680024716UL, 2112340059UL, 1372272974UL, 3295682653UL, 75966494UL, 39073784
72UL, 2636491350UL, 3117471955UL, 3574915532UL, 2372987046UL, 2604001339UL,
3893075234UL, 915576383UL, 1280296085UL, 534929913UL, 3660102574UL, 203023
3939UL, 346368350UL, 299776535UL, 987575186UL, 2566932710UL, 1566181275UL,
3535223896UL, 3874691533UL, 3731513754UL, 2581732444UL, 279715551UL, 245689
4951UL, 3100791084UL, 2185866850UL, 1495277039UL, 3417305339UL, 1618871086U
L, 1310351481UL, 4198538376UL, 2370669734UL, 391683868UL, 2009676005UL, 274
9085130UL, 1400216510UL, 1864988335UL, 2925823809UL, 58955107UL, 3352814594
UL, 2798390374UL, 1998600344UL, 1109346044UL, 3273979614UL, 1571752941UL, 1
892535124UL, 2536620737UL, 1440068573UL, 1174168447UL, 2485033697UL, 329082
740UL, 2077118779UL, 2858045339UL, 4062921629UL, 3837440666UL, 2644000449UL
, 3550167006UL, 3728768059UL, 2642133401UL, 1176559812UL, 2140118619UL, 141
2607307UL, 4081166331UL, 3124905304UL, 2902622972UL, 3804911318UL, 31427999
69UL, 3154073676UL, 1449454613UL, 2403391233UL, 50297646UL, 2855791484UL, 7
93851407UL, 3514201526UL, 2351826747UL, 3324588195UL, 3291978358UL, 1584226
715UL, 3636681672UL, 746876926UL, 3455625012UL, 2706009093UL, 1788578194UL,
3451519459UL, 3779307353UL, 3835380965UL, 4269115622UL, 2180305819UL, 3987
989524UL, 2602235234UL, 2607142354UL, 853428184UL, 2436760738UL, 2151617107
UL, 2066757692UL, 456808691UL, 4082489821UL, 741976046UL, 3590081269UL, 300
1080633UL, 2722205042UL, 3203248679UL, 147312102UL, 3432947806UL, 89950260U
L, 1950543946UL, 1932341300UL, 898396308UL, 3828432864UL, 947911286UL, 4286
16392UL, 3141601915UL, 276087167UL, 2517666433UL, 2583987247UL, 963534966UL
, 2745818074UL, 1442953464UL, 2223986807UL, 4229124441UL, 229039300UL, 1338
125300UL, 3642174036UL, 1053796945UL, 2472155633UL, 206115203UL, 3577092599
UL, 627103363UL, 1113276084UL, 2586882739UL, 1492461846UL, 583522272UL, 189
323282UL, 1490604990UL, 3501549884UL, 3552211807UL, 2757269706UL, 239221009
1UL, 3545407532UL, 740675778UL, 810640644UL, 2632631469UL, 3085332665UL, 75
5862267UL, 3643289881UL, 922608790UL, 4043427793UL, 1803743084UL, 195416663
0UL, 2213337398UL, 1785093944UL, 2613871529UL, 3810037031UL, 3042935707UL,
3162182177UL, 2791346436UL, 1901925289UL, 863100941UL, 3367519168UL, 197262
3238UL, 3664303070UL, 604922059UL, 3026817982UL, 1436412310UL, 4096180631UL
, 1597561857UL, 4206212303UL, 4127914332UL, 3228677359UL, 3985733659UL, 359
7290113UL, 4251197894UL, 3451370603UL, 609679338UL, 3360835257UL, 137223988
5UL, 638572328UL, 3806422284UL, 3974147336UL, 1804280837UL, 4209089291UL, 2
021797469UL, 3557188838UL, 409727186UL, 2114649178UL, 687702120UL, 25424459
92UL, 1235991799UL, 460479179UL, 2008348175UL, 887884478UL, 3942327811UL, 2
999928223UL, 4171339789UL, 2286339235UL, 1293442231UL, 1575942850UL, 761224
75UL, 1440527701UL, 2006558403UL, 1544148172UL, 895899367UL, 681826913UL, 4
094701935UL, 3995413790UL, 1027509154UL, 2264990896UL, 1938238113UL, 213430
250UL, 222469320UL, 609726517UL, 3581538106UL, 492802663UL, 120480843UL, 17
20004062UL, 1132674507UL, 911082758UL, 2909148131UL, 566658805UL, 396411444
5UL, 3483602509UL, 1793438750UL, 165562604UL, 3641830063UL, 2394205521UL, 3
404874822UL, 1672998096UL, 916151953UL, 1141264477UL, 3171661340UL, 3803396
219UL, 3018337382UL, 1863902683UL, 2474641928UL, 3250365071UL, 3897886220UL
, 1219701051UL, 51332576UL, 1358614881UL, 1707407492UL, 3670647816UL, 92335
7625UL, 343687395UL, 3991339686UL, 3913575403UL, 1267727936UL, 4001357856UL
, 3820224848UL, 2942896724UL, 3505936742UL, 1403285299UL, 1992762049UL, 567
748449UL, 2202721585UL, 2781324216UL, 1724850068UL, 2408314541UL, 307397581
3UL, 3992810029UL, 2475242354UL, 540562053UL, 2185198943UL, 3759352041UL, 3
373885614UL, 1132999410UL, 1097554565UL, 4089342358UL, 3239542922UL, 245174
8646UL, 407290679UL, 3188103200UL, 1708016248UL, 26848241UL, 2796711130UL,
3090711568UL, 4068389322UL, 3420916085UL, 3137567033UL, 2877819818UL, 22133
454UL, 4629160UL, 3703695249UL, 1920151708UL, 1175452162UL, 130015299UL, 33
31834713UL, 1099225384UL, 689254331UL, 1851083761UL, 2654970209UL, 32592979
36UL, 3742819314UL, 3524284766UL, 2291819083UL, 3494031861UL, 16242889UL, 3
545082774UL, 1997878108UL, 777447699UL, 4244916543UL, 3508640253UL, 3782278
393UL, 2107258964UL, 2139074576UL, 1383217899UL, 2337934322UL, 3181899620UL
, 1285955765UL, 2989610020UL, 3326862146UL, 1168587380UL, 801203532UL, 3020
809957UL, }, {3810471203UL, 1017064446UL, 1595207573UL, 441087832UL, 332674
6890UL, 3294064431UL, 167972517UL, 3625210015UL, 1011845006UL, 2980240819UL
, 1778354660UL, 3041730987UL, 1598611350UL, 2015169745UL, 2321724978UL, 339
0812967UL, 2432904511UL, 113261909UL, 3957193232UL, 3806115908UL, 296582892
9UL, 2035392295UL, 3500116619UL, 2881232416UL, 1672212265UL, 1607201428UL,
425148945UL, 1262591961UL, 2221781268UL, 4215047456UL, 2148245850UL, 278748
8981UL, 1077262192UL, 2085467561UL, 3053954888UL, 3584435116UL, 3013084787U
L, 287099941UL, 1290407232UL, 4078552287UL, 2658945475UL, 4251530898UL, 240
3086478UL, 2884923598UL, 3545110453UL, 4105390090UL, 343200643UL, 318988882
1UL, 4086304363UL, 3466483195UL, 259435633UL, 2846377387UL, 497258846UL, 27
2775541UL, 985737911UL, 2957688879UL, 2180784344UL, 3434619542UL, 364338483
8UL, 2228652440UL, 3107480718UL, 2208729807UL, 596436263UL, 3255120711UL, 3
248886970UL, 519242965UL, 602979109UL, 1619614UL, 1391563565UL, 56262588UL,
1584463910UL, 1849038201UL, 728022295UL, 848624947UL, 1813827408UL, 428214
945UL, 1246345586UL, 4213351865UL, 168985863UL, 456608054UL, 4277869380UL,
3886828599UL, 2264054549UL, 3110967170UL, 3138175314UL, 2649164828UL, 33693
78320UL, 3648350039UL, 3524848759UL, 1468470706UL, 3558859222UL, 2669673235
UL, 831851874UL, 4285651092UL, 4224147373UL, 1088456706UL, 231954609UL, 311
8005852UL, 225508069UL, 883105389UL, 856371341UL, 2001356578UL, 639336670UL
, 2363501707UL, 3622399552UL, 4024065226UL, 1093546838UL, 4263608561UL, 185
2072422UL, 425195042UL, 2441102396UL, 296426333UL, 384641750UL, 3559334435U
L, 1757327033UL, 1016016207UL, 3595686646UL, 24777793UL, 623926105UL, 21691
95923UL, 1779396793UL, 646997837UL, 1459728476UL, 2644865980UL, 1994581089U
L, 3956278544UL, 919592580UL, 2153558858UL, 2029633394UL, 3837501009UL, 401
6560170UL, 484838096UL, 3652199054UL, 1971790561UL, 605295089UL, 637470291U
L, 278970544UL, 3574824693UL, 295866521UL, 1755035156UL, 2542341803UL, 1588
716357UL, 1502596918UL, 4124554133UL, 3547049843UL, 1768033045UL, 153173463
0UL, 101448323UL, 3233017580UL, 1793222944UL, 3187853500UL, 186000900UL, 80
3444571UL, 2820254958UL, 2009384608UL, 2384668855UL, 2222812920UL, 63360866
5UL, 2028480056UL, 1258028235UL, 545095949UL, 3810471203UL, 1017064446UL, 1
595207573UL, 441087832UL, 899068662UL, 3294064431UL, 167972517UL, 362521001
5UL, 1011845006UL, 3951305793UL, 1778354660UL, 3041730987UL, 1598611350UL,
2015169745UL, 1885149424UL, 3390812967UL, 2432904511UL, 113261909UL, 395719
3232UL, 3953443155UL, 2965828929UL, 2035392295UL, 3500116619UL, 2881232416U
L, 329153573UL, 1607201428UL, 425148945UL, 1262591961UL, 2221781268UL, 7802
8761UL, 2148245850UL, 2787488981UL, 1077262192UL, 2085467561UL, 647235899UL
, 3584435116UL, 3013084787UL, 287099941UL, 1290407232UL, 1467385694UL, 2658
945475UL, 4251530898UL, 2403086478UL, 2884923598UL, 3489351040UL, 410539009
0UL, 343200643UL, 3189888821UL, 4086304363UL, 3521512280UL, 259435633UL, 28
46377387UL, 497258846UL, 272775541UL, 1367093111UL, 2957688879UL, 218078434
4UL, 3434619542UL, 3643384838UL, 411877686UL, 3107480718UL, 2208729807UL, 5
96436263UL, 3255120711UL, 584605030UL, 519242965UL, 602979109UL, 1619614UL,
1391563565UL, 3902518209UL, 1584463910UL, 1849038201UL, 728022295UL, 84862
4947UL, 1932969318UL, 428214945UL, 1246345586UL, 4213351865UL, 168985863UL,
2770345237UL, 4277869380UL, 3886828599UL, 2264054549UL, 3110967170UL, 2953
581033UL, 2649164828UL, 3369378320UL, 3648350039UL, 3524848759UL, 238035397
7UL, 3558859222UL, 2669673235UL, 831851874UL, 4285651092UL, 1214052447UL, 1
088456706UL, 231954609UL, 3118005852UL, 225508069UL, 1766983646UL, 85637134
1UL, 2001356578UL, 639336670UL, 2363501707UL, 1782816591UL, 4024065226UL, 1
093546838UL, 4263608561UL, 1852072422UL, 1149716600UL, 2441102396UL, 296426
333UL, 384641750UL, 3559334435UL, 2391309970UL, 1016016207UL, 3595686646UL,
24777793UL, 623926105UL, 362098678UL, 1779396793UL, 646997837UL, 145972847
6UL, 2644865980UL, 3238673748UL, 3956278544UL, 919592580UL, 2153558858UL, 2
029633394UL, 115778559UL, 4016560170UL, 484838096UL, 3652199054UL, 19717905
61UL, 737357475UL, 637470291UL, 278970544UL, 3574824693UL, 295866521UL, 398
9745853UL, 2542341803UL, 1588716357UL, 1502596918UL, 4124554133UL, 30168497
44UL, 1768033045UL, 1531734630UL, 101448323UL, 3233017580UL, 4157527581UL,
3187853500UL, 186000900UL, 803444571UL, 2820254958UL, 1980528062UL, 2384668
855UL, 2222812920UL, 633608665UL, 2028480056UL, 3166710281UL, 545095949UL,
3810471203UL, 1017064446UL, 1595207573UL, 693962828UL, 899068662UL, 3294064
431UL, 167972517UL, 3625210015UL, 1486040398UL, 3951305793UL, 1778354660UL,
3041730987UL, 1598611350UL, 2859363132UL, 1885149424UL, 3390812967UL, 2432
904511UL, 113261909UL, 664880478UL, 3953443155UL, 2965828929UL, 2035392295U
L, 3500116619UL, 558081801UL, 329153573UL, 1607201428UL, 425148945UL, 12625
91961UL, 3716247699UL, 78028761UL, 2148245850UL, 2787488981UL, 1077262192UL
, 4206362947UL, 647235899UL, 3584435116UL, 3013084787UL, 287099941UL, 25367
81098UL, 1467385694UL, 2658945475UL, 4251530898UL, 2403086478UL, 3075072413
UL, 3489351040UL, 4105390090UL, 343200643UL, 3189888821UL, 2540485172UL, 35
21512280UL, 259435633UL, 2846377387UL, 497258846UL, 2442427327UL, 136709311
1UL, 2957688879UL, 2180784344UL, 3434619542UL, 1593967423UL, 411877686UL, 3
107480718UL, 2208729807UL, 596436263UL, 1048686529UL, 584605030UL, 51924296
5UL, 602979109UL, 1619614UL, 2072745381UL, 3902518209UL, 1584463910UL, 1849
038201UL, 728022295UL, 846033949UL, 1932969318UL, 428214945UL, 1246345586UL
, 4213351865UL, 1066373275UL, 2770345237UL, 4277869380UL, 3886828599UL, 226
4054549UL, 1877859690UL, 2953581033UL, 2649164828UL, 3369378320UL, 36483500
39UL, 2537763389UL, 2380353977UL, 3558859222UL, 2669673235UL, 831851874UL,
522748140UL, 1214052447UL, 1088456706UL, 231954609UL, 3118005852UL, 1381269
315UL, 1766983646UL, 856371341UL, 2001356578UL, 639336670UL, 667275675UL, 1
782816591UL, 4024065226UL, 1093546838UL, 4263608561UL, 2057337961UL, 114971
6600UL, 2441102396UL, 296426333UL, 384641750UL, 340523210UL, 2391309970UL,
1016016207UL, 3595686646UL, 24777793UL, 3094832341UL, 362098678UL, 17793967
93UL, 646997837UL, 1459728476UL, 1169681568UL, 3238673748UL, 3956278544UL,
919592580UL, 2153558858UL, 388335108UL, 115778559UL, 4016560170UL, 48483809
6UL, 3652199054UL, 1764858181UL, 737357475UL, 637470291UL, 278970544UL, 357
4824693UL, 3671458900UL, 3989745853UL, 2542341803UL, 1588716357UL, 15025969
18UL, 2102871406UL, 3016849744UL, 1768033045UL, 1531734630UL, 101448323UL,
3964942332UL, 4157527581UL, 3187853500UL, 186000900UL, 803444571UL, 3425652
083UL, 1980528062UL, 2384668855UL, 2222812920UL, 633608665UL, 3035373876UL,
3166710281UL, 545095949UL, 3810471203UL, 1017064446UL, 669282349UL, 693962
828UL, 899068662UL, 3294064431UL, 167972517UL, 2007256988UL, 1486040398UL,
3951305793UL, 1778354660UL, 3041730987UL, 2827768941UL, 2859363132UL, 18851
49424UL, 3390812967UL, 2432904511UL, 3700915653UL, 664880478UL, 3953443155U
L, 2965828929UL, 2035392295UL, 1461208330UL, 558081801UL, 329153573UL, 1607
201428UL, 425148945UL, 1700881129UL, 3716247699UL, 78028761UL, 2148245850UL
, 2787488981UL, 2706775080UL, 4206362947UL, 647235899UL, 3584435116UL, 3013
084787UL, 2958545221UL, 2536781098UL, 1467385694UL, 2658945475UL, 425153089
8UL, 2241012567UL, 3075072413UL, 3489351040UL, 4105390090UL, 343200643UL, 4
90164649UL, 2540485172UL, 3521512280UL, 259435633UL, 2846377387UL, 40736118
31UL, 2442427327UL, 1367093111UL, 2957688879UL, 2180784344UL, 1835510773UL,
1593967423UL, 411877686UL, 3107480718UL, 2208729807UL, 3306732468UL, 10486
86529UL, 584605030UL, 519242965UL, 602979109UL, 2978864605UL, 2072745381UL,
3902518209UL, 1584463910UL, 1849038201UL, 3284115169UL, 846033949UL, 19329
69318UL, 428214945UL, 1246345586UL, 194166002UL, 1066373275UL, 2770345237UL
, 4277869380UL, 3886828599UL, 1874087886UL, 1877859690UL, 2953581033UL, 264
9164828UL, 3369378320UL, 4145454028UL, 2537763389UL, 2380353977UL, 35588592
22UL, 2669673235UL, 739345884UL, 522748140UL, 1214052447UL, 1088456706UL, 2
31954609UL, 3605603781UL, 1381269315UL, 1766983646UL, 856371341UL, 20013565
78UL, 2049940324UL, 667275675UL, 1782816591UL, 4024065226UL, 1093546838UL,
152524382UL, 2057337961UL, 1149716600UL, 2441102396UL, 296426333UL, 3195130
788UL, 340523210UL, 2391309970UL, 1016016207UL, 3595686646UL, 180492441UL,
3094832341UL, 362098678UL, 1779396793UL, 646997837UL, 2458167607UL, 1169681
568UL, 3238673748UL, 3956278544UL, 919592580UL, 3421005218UL, 388335108UL,
115778559UL, 4016560170UL, 484838096UL, 2649676374UL, 1764858181UL, 7373574
75UL, 637470291UL, 278970544UL, 2236401278UL, 3671458900UL, 3989745853UL, 2
542341803UL, 1588716357UL, 1241570134UL, 2102871406UL, 3016849744UL, 176803
3045UL, 1531734630UL, 1765654724UL, 3964942332UL, 4157527581UL, 3187853500U
L, 186000900UL, 2189716659UL, 3425652083UL, 1980528062UL, 2384668855UL, 222
2812920UL, 3955466207UL, 2426547616UL, 3846752458UL, 3015538636UL, 23425933
65UL, 3613176865UL, 3484860981UL, 4278370194UL, 1979143878UL, 1159739458UL,
3714038404UL, 396530346UL, 3276617756UL, 3293940597UL, 4050183149UL, 14185
71985UL, 402563753UL, 2702853013UL, 2289900621UL, 2267058511UL, 3482161995U
L, 3375026019UL, 1988640267UL, 3674438074UL, 4124612310UL, 1057883705UL, 43
4730475UL, 3210959778UL, 4102029739UL, 2140938750UL, 3176753074UL, 23569715
12UL, 3969685288UL, 1556275580UL, 2648433428UL, 3959375381UL, 478841344UL,
1496991528UL, 3309714981UL, 569990368UL, 3660587501UL, 2550379574UL, 117751
9842UL, 2652707373UL, 543943404UL, 1912551128UL, 2278132032UL, 1484596780UL
, 3570913985UL, 2982401320UL, 1413776035UL, 3177275459UL, 3036211597UL, 109
1740466UL, 3448424311UL, 1445187645UL, 3205024875UL, 3135795254UL, 82373872
9UL, 3742134467UL, 4066657438UL, 1226311678UL, 2403605393UL, 537573634UL, 3
457409768UL, 1940233423UL, 1761431281UL, 1129427309UL, 2443661283UL, 320081
4257UL, 4094866249UL, 2666869754UL, 604785127UL, 2213464116UL, 3002782918UL
, 468024929UL, 2490681314UL, 3666681384UL, 1583346053UL, 3049668798UL, 3592
153237UL, 2573082448UL, 3082970021UL, 1461796708UL, 832526980UL, 3728763274
UL, 355291229UL, 4029588456UL, 832358279UL, 2125298737UL, 3681181038UL, 324
5535160UL, 1333342738UL, 1868897492UL, 446790068UL, 1278093154UL, 209011861
5UL, 4158925515UL, 4062165914UL, 822726809UL, 1154960183UL, 286518382UL, 11
70424276UL, 2554691236UL, 3674133415UL, 2765714969UL, 2330865375UL, 1908307
334UL, 3537287082UL, 410252600UL, 3977128218UL, 424210327UL, 2919071615UL,
2715518134UL, 64568844UL, 480972649UL, 2488797168UL, 1302817038UL, 22139952
65UL, 4229997295UL, 2200797852UL, 109368057UL, 3033807022UL, 1907400078UL,
645977948UL, 1410909090UL, 3700787906UL, 3375062371UL, 629087832UL, 1344281
719UL, 4249981139UL, 3457543297UL, 1218556849UL, 864222854UL, 1458445945UL,
914545469UL, 3451164212UL, 1088025757UL, 1129933985UL, 953788883UL, 240617
2924UL, 170364546UL, 3505490646UL, 1027553899UL, 2864067776UL, 436854871UL,
1342782209UL, 761167471UL, 2660173631UL, 4159507498UL, 4172028400UL, 24422
54644UL, 2110123720UL, 2315991253UL, 873066601UL, 1725470559UL, 3831299052U
L, 678672031UL, 1585431329UL, 3495750550UL, }, {1998393432UL, 2665389278UL,
3989307699UL, 3267631636UL, 3861682977UL, 3243522970UL, 1243992413UL, 2200
497260UL, 3821883021UL, 4187123083UL, 3451270040UL, 3044132745UL, 210128724
9UL, 2340839784UL, 227040990UL, 1724350416UL, 3228881240UL, 3123386528UL, 4
279362126UL, 3098224464UL, 2635534069UL, 3622906431UL, 206207480UL, 1894245
533UL, 2152374527UL, 1011223653UL, 7271757UL, 2972858087UL, 207942127UL, 33
55362797UL, 2593296740UL, 174093751UL, 3713822176UL, 4212355586UL, 33356052
24UL, 1171716408UL, 2867257989UL, 1522213957UL, 2016192462UL, 4229688395UL,
2174928148UL, 1468226225UL, 3938290338UL, 493240317UL, 3229423344UL, 25854
75729UL, 3112454413UL, 1881171707UL, 2555908056UL, 1997546352UL, 380428329U
L, 3341885423UL, 3307510279UL, 3519476676UL, 3613100811UL, 2555826262UL, 10
9341943UL, 2382715395UL, 3883409616UL, 1593551879UL, 2163678014UL, 33797831
37UL, 2810374300UL, 1516064864UL, 561144874UL, 316017838UL, 1899237567UL, 7
0857401UL, 3435185465UL, 4234661323UL, 2580352177UL, 32879620UL, 4171670150
UL, 1986234067UL, 3589478191UL, 2073132526UL, 2603712175UL, 377997975UL, 24
74419397UL, 3110698341UL, 812664089UL, 1778922726UL, 1686111212UL, 97278413
8UL, 3936486236UL, 2711468739UL, 423435866UL, 1661961159UL, 802312780UL, 18
68728136UL, 1760295704UL, 3357409828UL, 215039860UL, 683184627UL, 401911106
4UL, 3609261689UL, 2167554309UL, 1831085281UL, 3389357802UL, 4193421575UL,
628277197UL, 2900207619UL, 993609502UL, 3429627083UL, 2636466084UL, 3652352
199UL, 1780133580UL, 1670387713UL, 4086070210UL, 4004540729UL, 783029246UL,
2165667566UL, 1739001057UL, 377639972UL, 1102689625UL, 1945278055UL, 39411
85940UL, 3685368326UL, 1881761572UL, 2201338934UL, 801752UL, 2729497735UL,
492844690UL, 2998826141UL, 3844964457UL, 3679088359UL, 2196391660UL, 422226
9404UL, 357321611UL, 3727170055UL, 1819614072UL, 2348798457UL, 4294366646UL
, 1952884323UL, 3574345216UL, 2040734807UL, 232392443UL, 4183498179UL, 2614
866055UL, 112120292UL, 3624018350UL, 3340709877UL, 3097507723UL, 1268833488
UL, 3570501956UL, 3338260086UL, 293812421UL, 3683058169UL, 1147960351UL, 28
3731890UL, 2171233479UL, 1830154455UL, 4036602681UL, 1996981699UL, 13280383
4UL, 40256165UL, 2158110401UL, 3575159090UL, 3196553513UL, 3559872992UL, 34
02884675UL, 1998393432UL, 2665389278UL, 3989307699UL, 3267631636UL, 3617519
767UL, 3243522970UL, 1243992413UL, 2200497260UL, 3821883021UL, 3715729085UL
, 3451270040UL, 3044132745UL, 2101287249UL, 2340839784UL, 3173635549UL, 172
4350416UL, 3228881240UL, 3123386528UL, 4279362126UL, 2287520039UL, 26355340
69UL, 3622906431UL, 206207480UL, 1894245533UL, 96723416UL, 1011223653UL, 72
71757UL, 2972858087UL, 207942127UL, 1668335352UL, 2593296740UL, 174093751UL
, 3713822176UL, 4212355586UL, 49226793UL, 1171716408UL, 2867257989UL, 15222
13957UL, 2016192462UL, 118712412UL, 2174928148UL, 1468226225UL, 3938290338U
L, 493240317UL, 3788174304UL, 2585475729UL, 3112454413UL, 1881171707UL, 255
5908056UL, 3351139844UL, 380428329UL, 3341885423UL, 3307510279UL, 351947667
6UL, 1368994724UL, 2555826262UL, 109341943UL, 2382715395UL, 3883409616UL, 1
561509458UL, 2163678014UL, 3379783137UL, 2810374300UL, 1516064864UL, 231325
2274UL, 316017838UL, 1899237567UL, 70857401UL, 3435185465UL, 2585770746UL,
2580352177UL, 32879620UL, 4171670150UL, 1986234067UL, 3317983509UL, 2073132
526UL, 2603712175UL, 377997975UL, 2474419397UL, 908728599UL, 812664089UL, 1
778922726UL, 1686111212UL, 972784138UL, 1992540005UL, 2711468739UL, 4234358
66UL, 1661961159UL, 802312780UL, 907108769UL, 1760295704UL, 3357409828UL, 2
15039860UL, 683184627UL, 2806826652UL, 3609261689UL, 2167554309UL, 18310852
81UL, 3389357802UL, 2755692689UL, 628277197UL, 2900207619UL, 993609502UL, 3
429627083UL, 3605915742UL, 3652352199UL, 1780133580UL, 1670387713UL, 408607
0210UL, 3717326627UL, 783029246UL, 2165667566UL, 1739001057UL, 377639972UL,
2355216626UL, 1945278055UL, 3941185940UL, 3685368326UL, 1881761572UL, 4024
097818UL, 801752UL, 2729497735UL, 492844690UL, 2998826141UL, 2719601647UL,
3679088359UL, 2196391660UL, 4222269404UL, 357321611UL, 1319821972UL, 181961
4072UL, 2348798457UL, 4294366646UL, 1952884323UL, 3573866689UL, 2040734807U
L, 232392443UL, 4183498179UL, 2614866055UL, 440744432UL, 3624018350UL, 3340
709877UL, 3097507723UL, 1268833488UL, 224895395UL, 3338260086UL, 293812421U
L, 3683058169UL, 1147960351UL, 3433425235UL, 2171233479UL, 1830154455UL, 40
36602681UL, 1996981699UL, 2875889721UL, 40256165UL, 2158110401UL, 357515909
0UL, 3196553513UL, 1094082574UL, 3402884675UL, 1998393432UL, 2665389278UL,
3989307699UL, 4068940467UL, 3617519767UL, 3243522970UL, 1243992413UL, 22004
97260UL, 441678457UL, 3715729085UL, 3451270040UL, 3044132745UL, 2101287249U
L, 2181502237UL, 3173635549UL, 1724350416UL, 3228881240UL, 3123386528UL, 19
68352124UL, 2287520039UL, 2635534069UL, 3622906431UL, 206207480UL, 20650935
99UL, 96723416UL, 1011223653UL, 7271757UL, 2972858087UL, 1094044749UL, 1668
335352UL, 2593296740UL, 174093751UL, 3713822176UL, 2887397643UL, 49226793UL
, 1171716408UL, 2867257989UL, 1522213957UL, 984348433UL, 118712412UL, 21749
28148UL, 1468226225UL, 3938290338UL, 2279430036UL, 3788174304UL, 2585475729
UL, 3112454413UL, 1881171707UL, 4247636500UL, 3351139844UL, 380428329UL, 33
41885423UL, 3307510279UL, 2887754196UL, 1368994724UL, 2555826262UL, 1093419
43UL, 2382715395UL, 2836761616UL, 1561509458UL, 2163678014UL, 3379783137UL,
2810374300UL, 1635278016UL, 2313252274UL, 316017838UL, 1899237567UL, 70857
401UL, 3481535811UL, 2585770746UL, 2580352177UL, 32879620UL, 4171670150UL,
2248003250UL, 3317983509UL, 2073132526UL, 2603712175UL, 377997975UL, 328616
2818UL, 908728599UL, 812664089UL, 1778922726UL, 1686111212UL, 4024815755UL,
1992540005UL, 2711468739UL, 423435866UL, 1661961159UL, 2257259057UL, 90710
8769UL, 1760295704UL, 3357409828UL, 215039860UL, 3917391198UL, 2806826652UL
, 3609261689UL, 2167554309UL, 1831085281UL, 4238043113UL, 2755692689UL, 628
277197UL, 2900207619UL, 993609502UL, 2036092353UL, 3605915742UL, 3652352199
UL, 1780133580UL, 1670387713UL, 118446953UL, 3717326627UL, 783029246UL, 216
5667566UL, 1739001057UL, 203160626UL, 2355216626UL, 1945278055UL, 394118594
0UL, 3685368326UL, 546361979UL, 4024097818UL, 801752UL, 2729497735UL, 49284
4690UL, 1023017124UL, 2719601647UL, 3679088359UL, 2196391660UL, 4222269404U
L, 621859651UL, 1319821972UL, 1819614072UL, 2348798457UL, 4294366646UL, 111
4888560UL, 3573866689UL, 2040734807UL, 232392443UL, 4183498179UL, 395950460
9UL, 440744432UL, 3624018350UL, 3340709877UL, 3097507723UL, 3613295037UL, 2
24895395UL, 3338260086UL, 293812421UL, 3683058169UL, 1655305863UL, 34334252
35UL, 2171233479UL, 1830154455UL, 4036602681UL, 3731384097UL, 2875889721UL,
40256165UL, 2158110401UL, 3575159090UL, 1847744924UL, 1094082574UL, 340288
4675UL, 1998393432UL, 2665389278UL, 3781866777UL, 4068940467UL, 3617519767U
L, 3243522970UL, 1243992413UL, 2723708256UL, 441678457UL, 3715729085UL, 345
1270040UL, 3044132745UL, 4013832842UL, 2181502237UL, 3173635549UL, 17243504
16UL, 3228881240UL, 2092292494UL, 1968352124UL, 2287520039UL, 2635534069UL,
3622906431UL, 3186333458UL, 2065093599UL, 96723416UL, 1011223653UL, 727175
7UL, 649658033UL, 1094044749UL, 1668335352UL, 2593296740UL, 174093751UL, 41
59420309UL, 2887397643UL, 49226793UL, 1171716408UL, 2867257989UL, 259007795
3UL, 984348433UL, 118712412UL, 2174928148UL, 1468226225UL, 1065322711UL, 22
79430036UL, 3788174304UL, 2585475729UL, 3112454413UL, 3932517386UL, 4247636
500UL, 3351139844UL, 380428329UL, 3341885423UL, 1285273904UL, 2887754196UL,
1368994724UL, 2555826262UL, 109341943UL, 2318470582UL, 2836761616UL, 15615
09458UL, 2163678014UL, 3379783137UL, 674658583UL, 1635278016UL, 2313252274U
L, 316017838UL, 1899237567UL, 2192372173UL, 3481535811UL, 2585770746UL, 258
0352177UL, 32879620UL, 300323274UL, 2248003250UL, 3317983509UL, 2073132526U
L, 2603712175UL, 3086543917UL, 3286162818UL, 908728599UL, 812664089UL, 1778
922726UL, 2263290659UL, 4024815755UL, 1992540005UL, 2711468739UL, 423435866
UL, 819027349UL, 2257259057UL, 907108769UL, 1760295704UL, 3357409828UL, 114
2221093UL, 3917391198UL, 2806826652UL, 3609261689UL, 2167554309UL, 41081558
75UL, 4238043113UL, 2755692689UL, 628277197UL, 2900207619UL, 3041719497UL,
2036092353UL, 3605915742UL, 3652352199UL, 1780133580UL, 2397410862UL, 11844
6953UL, 3717326627UL, 783029246UL, 2165667566UL, 2721690354UL, 203160626UL,
2355216626UL, 1945278055UL, 3941185940UL, 2768842108UL, 546361979UL, 40240
97818UL, 801752UL, 2729497735UL, 4045063232UL, 1023017124UL, 2719601647UL,
3679088359UL, 2196391660UL, 2666107451UL, 621859651UL, 1319821972UL, 181961
4072UL, 2348798457UL, 3555102623UL, 1114888560UL, 3573866689UL, 2040734807U
L, 232392443UL, 3359040541UL, 3959504609UL, 440744432UL, 3624018350UL, 3340
709877UL, 1477919696UL, 3613295037UL, 224895395UL, 3338260086UL, 293812421U
L, 4210187101UL, 1655305863UL, 3433425235UL, 2171233479UL, 1830154455UL, 41
50241150UL, 3731384097UL, 2875889721UL, 40256165UL, 2158110401UL, 335024668
7UL, 455561037UL, 2250400255UL, 3192153445UL, 3258870230UL, 1500391873UL, 4
142878334UL, 1155955691UL, 1483275844UL, 4189436981UL, 323745948UL, 1976017
426UL, 2804626790UL, 2717553615UL, 2315409034UL, 954508235UL, 3845175920UL,
3999878682UL, 1247696432UL, 1743319509UL, 2998248398UL, 3694350012UL, 4072
006361UL, 191306987UL, 2816321878UL, 1324077734UL, 1083060006UL, 3406855480
UL, 1619622379UL, 2160350UL, 3302238190UL, 3368021261UL, 3685228564UL, 3863
934685UL, 771728612UL, 854205233UL, 2304696695UL, 421449207UL, 1265752117UL
, 3852292419UL, 305345788UL, 1540622105UL, 1904883477UL, 833469256UL, 13440
6680UL, 3012455058UL, 4035477953UL, 2925192459UL, 1559200592UL, 3851612860U
L, 718484562UL, 1377960276UL, 1586892849UL, 1361298269UL, 3417917896UL, 128
1324499UL, 1012538763UL, 1350578667UL, 3946475598UL, 2982283954UL, 35487928
04UL, 284542749UL, 1194648577UL, 3087899716UL, 3966595444UL, 2088330116UL,
3641652062UL, 327128507UL, 593906557UL, 1092448919UL, 2459189516UL, 4053392
241UL, 3356198248UL, 2352376508UL, 470648997UL, 1017041256UL, 3234172340UL,
3928191489UL, 3266226858UL, 4219289150UL, 1229098319UL, 4275351308UL, 2720
777751UL, 3566728718UL, 638322822UL, 2369792461UL, 2869492261UL, 3120083828
UL, 1890399556UL, 3309991008UL, 3785452464UL, 4128660314UL, 3726791982UL, 1
67177896UL, 461294981UL, 3988638998UL, 2937794823UL, 3981029822UL, 11116814
02UL, 2015965721UL, 7261806UL, 2669786265UL, 1083582734UL, 3270228881UL, 38
92235938UL, 2695872715UL, 4246051290UL, 3214293333UL, 343604199UL, 32156048
88UL, 661024127UL, 2931754053UL, 3787840039UL, 2053363765UL, 363432336UL, 1
12334132UL, 2871797223UL, 138911320UL, 3981126938UL, 2027332192UL, 18047306
44UL, 590150270UL, 641538574UL, 6802174UL, 3551446076UL, 3908480472UL, 1004
531022UL, 2097228524UL, 1919074232UL, 154482247UL, 121437972UL, 1215661323U
L, 1178068273UL, 1097220699UL, 2823681422UL, 262636065UL, 2943371149UL, 176
8780720UL, 3866040605UL, 1855991583UL, 3988248086UL, 629223947UL, 338061233
0UL, 3552916762UL, 197596340UL, 573801686UL, 2049230598UL, 2910471867UL, 26
86314264UL, 1726228846UL, 3516983332UL, 726840185UL, 1241204222UL, 22375743
17UL, 70568042UL, 1932610099UL, 2221862221UL, 1510378092UL, 4050391637UL, 4
077539568UL, }, {3872117793UL, 803220151UL, 70843412UL, 1661103032UL, 19768
11457UL, 2186373604UL, 564259972UL, 1475436923UL, 2260980893UL, 4245534505U
L, 1075107552UL, 3692990573UL, 370098873UL, 4045905424UL, 2420395420UL, 233
2395402UL, 207483321UL, 622317750UL, 3004242500UL, 833623111UL, 3151161301U
L, 1629139881UL, 352228793UL, 2439953368UL, 3183333619UL, 2703537080UL, 321
8957129UL, 3164695888UL, 1741641842UL, 963394141UL, 4241612717UL, 103447678
4UL, 2035880432UL, 3977821313UL, 1543311495UL, 3010014356UL, 1638490901UL,
2364265378UL, 3420329129UL, 333361555UL, 1133565821UL, 1450937015UL, 616059
115UL, 3216393887UL, 3041978455UL, 3990855695UL, 1238628750UL, 512746184UL,
3256670217UL, 1616316512UL, 2791405051UL, 93474487UL, 2865892488UL, 190147
1398UL, 2930857966UL, 2178431077UL, 2325598341UL, 3189256113UL, 1302432091U
L, 808592927UL, 2945846737UL, 3487931071UL, 2018175258UL, 752981057UL, 1097
082589UL, 1307115286UL, 175147508UL, 3611190164UL, 850238914UL, 3318706185U
L, 199743319UL, 328621708UL, 3183670050UL, 3609998315UL, 4075306371UL, 3554
549067UL, 2119566187UL, 1498503842UL, 1261870696UL, 2216745780UL, 950288337
UL, 1117344941UL, 2150569143UL, 2899286760UL, 1594966374UL, 888858617UL, 35
840654UL, 2829539211UL, 2511395669UL, 3607190544UL, 3278412778UL, 224989590
7UL, 1320858068UL, 3576889788UL, 266766189UL, 1522426851UL, 1903494122UL, 1
928370573UL, 2628132591UL, 3322025904UL, 220280169UL, 433606853UL, 14289614
79UL, 986074592UL, 2128892987UL, 467697583UL, 1616913929UL, 325674890UL, 44
4442578UL, 649166208UL, 1689709565UL, 1493452467UL, 2222122038UL, 121114616
UL, 2134348225UL, 3512035688UL, 1283058921UL, 4230441398UL, 3701238559UL, 3
37534132UL, 1418548715UL, 1190006478UL, 500654385UL, 1766924757UL, 19446807
46UL, 940574010UL, 922744002UL, 186142284UL, 3131162902UL, 1693891092UL, 30
31823448UL, 2143051534UL, 1429025284UL, 1487843160UL, 3606456133UL, 2079235
652UL, 2447285474UL, 2669283767UL, 3232117829UL, 2490054343UL, 3225501736UL
, 2911340385UL, 382319031UL, 1516937595UL, 622543191UL, 1388990570UL, 17491
79860UL, 1924483707UL, 2593474505UL, 472539197UL, 122872799UL, 2586347240UL
, 880588515UL, 4046335279UL, 1712182607UL, 4270737941UL, 1336703451UL, 3390
078162UL, 382216945UL, 3733326081UL, 460422073UL, 3872117793UL, 803220151UL
, 70843412UL, 1661103032UL, 250339760UL, 2186373604UL, 564259972UL, 1475436
923UL, 2260980893UL, 657986735UL, 1075107552UL, 3692990573UL, 370098873UL,
4045905424UL, 3201950123UL, 2332395402UL, 207483321UL, 622317750UL, 3004242
500UL, 3732213278UL, 3151161301UL, 1629139881UL, 352228793UL, 2439953368UL,
3572618926UL, 2703537080UL, 3218957129UL, 3164695888UL, 1741641842UL, 6859
33373UL, 4241612717UL, 1034476784UL, 2035880432UL, 3977821313UL, 3855995181
UL, 3010014356UL, 1638490901UL, 2364265378UL, 3420329129UL, 2355603679UL, 1
133565821UL, 1450937015UL, 616059115UL, 3216393887UL, 1733804102UL, 3990855
695UL, 1238628750UL, 512746184UL, 3256670217UL, 2651059231UL, 2791405051UL,
93474487UL, 2865892488UL, 1901471398UL, 2113461797UL, 2178431077UL, 232559
8341UL, 3189256113UL, 1302432091UL, 2986990416UL, 2945846737UL, 3487931071U
L, 2018175258UL, 752981057UL, 2428033310UL, 1307115286UL, 175147508UL, 3611
190164UL, 850238914UL, 1033628405UL, 199743319UL, 328621708UL, 3183670050UL
, 3609998315UL, 4024297327UL, 3554549067UL, 2119566187UL, 1498503842UL, 126
1870696UL, 290361143UL, 950288337UL, 1117344941UL, 2150569143UL, 2899286760
UL, 168826051UL, 888858617UL, 35840654UL, 2829539211UL, 2511395669UL, 28908
82060UL, 3278412778UL, 2249895907UL, 1320858068UL, 3576889788UL, 1794920145
UL, 1522426851UL, 1903494122UL, 1928370573UL, 2628132591UL, 1251697758UL, 2
20280169UL, 433606853UL, 1428961479UL, 986074592UL, 2707115661UL, 467697583
UL, 1616913929UL, 325674890UL, 444442578UL, 122781510UL, 1689709565UL, 1493
452467UL, 2222122038UL, 121114616UL, 3425723636UL, 3512035688UL, 1283058921
UL, 4230441398UL, 3701238559UL, 1646155473UL, 1418548715UL, 1190006478UL, 5
00654385UL, 1766924757UL, 3920475367UL, 940574010UL, 922744002UL, 186142284
UL, 3131162902UL, 54639113UL, 3031823448UL, 2143051534UL, 1429025284UL, 148
7843160UL, 4152687885UL, 2079235652UL, 2447285474UL, 2669283767UL, 32321178
29UL, 1601035152UL, 3225501736UL, 2911340385UL, 382319031UL, 1516937595UL,
3508441679UL, 1388990570UL, 1749179860UL, 1924483707UL, 2593474505UL, 28354
03456UL, 122872799UL, 2586347240UL, 880588515UL, 4046335279UL, 2958058367UL
, 4270737941UL, 1336703451UL, 3390078162UL, 382216945UL, 450517882UL, 46042
2073UL, 3872117793UL, 803220151UL, 70843412UL, 2066343874UL, 250339760UL, 2
186373604UL, 564259972UL, 1475436923UL, 1683787449UL, 657986735UL, 10751075
52UL, 3692990573UL, 370098873UL, 2615082840UL, 3201950123UL, 2332395402UL,
207483321UL, 622317750UL, 2655424371UL, 3732213278UL, 3151161301UL, 1629139
881UL, 352228793UL, 3236724760UL, 3572618926UL, 2703537080UL, 3218957129UL,
3164695888UL, 9775065UL, 685933373UL, 4241612717UL, 1034476784UL, 20358804
32UL, 1621920075UL, 3855995181UL, 3010014356UL, 1638490901UL, 2364265378UL,
1509475888UL, 2355603679UL, 1133565821UL, 1450937015UL, 616059115UL, 36661
88236UL, 1733804102UL, 3990855695UL, 1238628750UL, 512746184UL, 3900473826U
L, 2651059231UL, 2791405051UL, 93474487UL, 2865892488UL, 222759186UL, 21134
61797UL, 2178431077UL, 2325598341UL, 3189256113UL, 2505499508UL, 2986990416
UL, 2945846737UL, 3487931071UL, 2018175258UL, 2766733928UL, 2428033310UL, 1
307115286UL, 175147508UL, 3611190164UL, 1909211603UL, 1033628405UL, 1997433
19UL, 328621708UL, 3183670050UL, 1680331218UL, 4024297327UL, 3554549067UL,
2119566187UL, 1498503842UL, 3516256046UL, 290361143UL, 950288337UL, 1117344
941UL, 2150569143UL, 3182619063UL, 168826051UL, 888858617UL, 35840654UL, 28
29539211UL, 645798943UL, 2890882060UL, 3278412778UL, 2249895907UL, 13208580
68UL, 1436708568UL, 1794920145UL, 1522426851UL, 1903494122UL, 1928370573UL,
3693049252UL, 1251697758UL, 220280169UL, 433606853UL, 1428961479UL, 372441
5861UL, 2707115661UL, 467697583UL, 1616913929UL, 325674890UL, 1448052253UL,
122781510UL, 1689709565UL, 1493452467UL, 2222122038UL, 2177448198UL, 34257
23636UL, 3512035688UL, 1283058921UL, 4230441398UL, 3050940272UL, 1646155473
UL, 1418548715UL, 1190006478UL, 500654385UL, 1106232UL, 3920475367UL, 94057
4010UL, 922744002UL, 186142284UL, 4144806511UL, 54639113UL, 3031823448UL, 2
143051534UL, 1429025284UL, 2067453848UL, 4152687885UL, 2079235652UL, 244728
5474UL, 2669283767UL, 428527087UL, 1601035152UL, 3225501736UL, 2911340385UL
, 382319031UL, 2565464472UL, 3508441679UL, 1388990570UL, 1749179860UL, 1924
483707UL, 1737735237UL, 2835403456UL, 122872799UL, 2586347240UL, 880588515U
L, 597822462UL, 2958058367UL, 4270737941UL, 1336703451UL, 3390078162UL, 253
2634475UL, 450517882UL, 460422073UL, 3872117793UL, 803220151UL, 801648827UL
, 2066343874UL, 250339760UL, 2186373604UL, 564259972UL, 3417948976UL, 16837
87449UL, 657986735UL, 1075107552UL, 3692990573UL, 2235306692UL, 2615082840U
L, 3201950123UL, 2332395402UL, 207483321UL, 699310933UL, 2655424371UL, 3732
213278UL, 3151161301UL, 1629139881UL, 1152704006UL, 3236724760UL, 357261892
6UL, 2703537080UL, 3218957129UL, 2726926336UL, 9775065UL, 685933373UL, 4241
612717UL, 1034476784UL, 2398119652UL, 1621920075UL, 3855995181UL, 301001435
6UL, 1638490901UL, 252854480UL, 1509475888UL, 2355603679UL, 1133565821UL, 1
450937015UL, 2655911639UL, 3666188236UL, 1733804102UL, 3990855695UL, 123862
8750UL, 1115900497UL, 3900473826UL, 2651059231UL, 2791405051UL, 93474487UL,
1862985957UL, 222759186UL, 2113461797UL, 2178431077UL, 2325598341UL, 41790
75132UL, 2505499508UL, 2986990416UL, 2945846737UL, 3487931071UL, 564667776U
L, 2766733928UL, 2428033310UL, 1307115286UL, 175147508UL, 1759077815UL, 190
9211603UL, 1033628405UL, 199743319UL, 328621708UL, 2552816198UL, 1680331218
UL, 4024297327UL, 3554549067UL, 2119566187UL, 2267805778UL, 3516256046UL, 2
90361143UL, 950288337UL, 1117344941UL, 2897506172UL, 3182619063UL, 16882605
1UL, 888858617UL, 35840654UL, 2035476068UL, 645798943UL, 2890882060UL, 3278
412778UL, 2249895907UL, 3278449102UL, 1436708568UL, 1794920145UL, 152242685
1UL, 1903494122UL, 1500763736UL, 3693049252UL, 1251697758UL, 220280169UL, 4
33606853UL, 3914497854UL, 3724415861UL, 2707115661UL, 467697583UL, 16169139
29UL, 918435305UL, 1448052253UL, 122781510UL, 1689709565UL, 1493452467UL, 6
09575172UL, 2177448198UL, 3425723636UL, 3512035688UL, 1283058921UL, 3661181
550UL, 3050940272UL, 1646155473UL, 1418548715UL, 1190006478UL, 1047301661UL
, 1106232UL, 3920475367UL, 940574010UL, 922744002UL, 2510633517UL, 41448065
11UL, 54639113UL, 3031823448UL, 2143051534UL, 3242814908UL, 2067453848UL, 4
152687885UL, 2079235652UL, 2447285474UL, 736638210UL, 428527087UL, 16010351
52UL, 3225501736UL, 2911340385UL, 1849570436UL, 2565464472UL, 3508441679UL,
1388990570UL, 1749179860UL, 84517579UL, 1737735237UL, 2835403456UL, 122872
799UL, 2586347240UL, 4002124614UL, 597822462UL, 2958058367UL, 4270737941UL,
1336703451UL, 3078170472UL, 1186434751UL, 700631413UL, 1497890797UL, 11953
47450UL, 2560167391UL, 1116697259UL, 1254138573UL, 747913260UL, 240954704UL
, 3107512667UL, 360584144UL, 3422778960UL, 3516528389UL, 3301260366UL, 1254
513537UL, 122269053UL, 1579582456UL, 873334104UL, 3918835024UL, 1731872444U
L, 1974410416UL, 1811172641UL, 4172523062UL, 4092675777UL, 4124987343UL, 19
36078756UL, 1757348689UL, 2694415512UL, 128641660UL, 1744777659UL, 31731167
29UL, 983733754UL, 1430789547UL, 701906842UL, 3367232568UL, 3266433501UL, 3
572590347UL, 1453272962UL, 2106553114UL, 993786201UL, 2149441250UL, 1295181
065UL, 2962229026UL, 3709052556UL, 3255608941UL, 3677730029UL, 483873127UL,
102227292UL, 2626265293UL, 2018984578UL, 2266388762UL, 1191709548UL, 21527
25916UL, 583672623UL, 2230473473UL, 1995194269UL, 1740347812UL, 2558095372U
L, 3070195183UL, 3023333227UL, 2497183195UL, 1908755188UL, 773027539UL, 364
6876518UL, 2272586839UL, 493318726UL, 2107067517UL, 2000805278UL, 253082963
6UL, 3183628745UL, 677565332UL, 1497629423UL, 82094920UL, 2214054433UL, 263
5367545UL, 470855467UL, 2184853389UL, 2942188934UL, 188335670UL, 3656661644
UL, 1883526235UL, 3990873975UL, 1490784356UL, 4047548172UL, 3149642641UL, 3
289988179UL, 2590918909UL, 2893039564UL, 2350687346UL, 4252624874UL, 153724
56UL, 1614496594UL, 2364847678UL, 2604511825UL, 422365460UL, 4195174772UL,
3266964836UL, 2008671995UL, 54038434UL, 781948549UL, 1276017666UL, 27563766
12UL, 2436825273UL, 1711863836UL, 3541493950UL, 3821378841UL, 1007557618UL,
345375815UL, 2081905201UL, 2227278118UL, 1185927141UL, 1082173792UL, 35673
61925UL, 1940465859UL, 541632942UL, 1830210248UL, 3757851982UL, 775883450UL
, 1666577465UL, 1004944607UL, 878440834UL, 2146344131UL, 4195798476UL, 3701
64841UL, 3649112729UL, 37066142UL, 2311278904UL, 1935745497UL, 2304799402UL
, 4107299626UL, 1348526232UL, 2473609635UL, 3284032699UL, 2374292786UL, 176
2329186UL, 857978496UL, 1039346432UL, 2621413355UL, 29961014UL, 3582263091U
L, 4268542513UL, 3890612190UL, 3096173646UL, 2026544230UL, 3856142618UL, 23
47115934UL, 319800326UL, 3255916105UL, 2430273059UL, 823505311UL, 874255188
UL, 1401925393UL, 4203707857UL, 4259159566UL, 2606881118UL, 1978288664UL, 1
447576038UL, 3860341401UL, 412510348UL, }, {4052471963UL, 683640040UL, 3043
876021UL, 3466644483UL, 4222418025UL, 3035140128UL, 1466027937UL, 18198088U
L, 3410320851UL, 3040963721UL, 488404231UL, 3157371815UL, 769336092UL, 3240
417718UL, 808582581UL, 2075839263UL, 835026995UL, 3123726486UL, 3284240985U
L, 1898453053UL, 3606056482UL, 512836002UL, 2715428547UL, 4182302879UL, 164
4882480UL, 3160187826UL, 390292489UL, 980889545UL, 2776206633UL, 2482799995
UL, 617042280UL, 3501667414UL, 689451808UL, 497018701UL, 238525753UL, 38901
63301UL, 896679896UL, 1544533015UL, 3412477225UL, 3116575138UL, 4250402651U
L, 3990990746UL, 819056741UL, 1459334146UL, 158377590UL, 3444755752UL, 8230
450UL, 1378706455UL, 684191332UL, 3217423797UL, 2842520097UL, 1631477948UL,
2591254230UL, 959644473UL, 1020694107UL, 1748401915UL, 3452514983UL, 38927
66171UL, 1227786994UL, 2086180800UL, 2394613217UL, 2091953150UL, 870094953U
L, 2306851481UL, 571550601UL, 488878212UL, 873197214UL, 2630100528UL, 20674
76907UL, 2162307009UL, 2026119728UL, 115875280UL, 2905867426UL, 248774881UL
, 3110900450UL, 2236032812UL, 1888510348UL, 708001855UL, 996960491UL, 35141
96956UL, 1407967546UL, 1826568876UL, 3659618284UL, 2614104317UL, 2230066308
UL, 1055135881UL, 2537437343UL, 1858044413UL, 2608594891UL, 2750681169UL, 3
241939420UL, 3966440877UL, 2375002886UL, 2417753441UL, 1405878685UL, 108113
3199UL, 1496940727UL, 382467042UL, 2745477587UL, 1209424459UL, 811187075UL,
1385604734UL, 2623887355UL, 3443875720UL, 394141555UL, 4142998949UL, 41954
14618UL, 1489846841UL, 2253433808UL, 1171450286UL, 84131191UL, 4387588UL, 2
641405140UL, 3525405389UL, 3273000909UL, 423660319UL, 2366546732UL, 3698878
607UL, 2161119729UL, 4263629085UL, 3029102089UL, 2692507376UL, 3266869596UL
, 1658012061UL, 1960169440UL, 1002311379UL, 3724446882UL, 2004188516UL, 999
513506UL, 2200093802UL, 4141037460UL, 351865836UL, 412875013UL, 1535823315U
L, 3880657632UL, 3109944987UL, 3207577548UL, 3462087941UL, 584875517UL, 263
5241084UL, 3834145971UL, 1693380373UL, 3524443732UL, 934775214UL, 196058884
7UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000UL, 226430296UL, 665
553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 3925248326UL, 285534637
6UL, 1205558328UL, 808835317UL, 3295908896UL, 4170076136UL, 2438272365UL, 4
052471963UL, 683640040UL, 3043876021UL, 3466644483UL, 1385549869UL, 3035140
128UL, 1466027937UL, 18198088UL, 3410320851UL, 2171386836UL, 488404231UL, 3
157371815UL, 769336092UL, 3240417718UL, 2921774554UL, 2075839263UL, 8350269
95UL, 3123726486UL, 3284240985UL, 72352110UL, 3606056482UL, 512836002UL, 27
15428547UL, 4182302879UL, 3869483469UL, 3160187826UL, 390292489UL, 98088954
5UL, 2776206633UL, 1385691983UL, 617042280UL, 3501667414UL, 689451808UL, 49
7018701UL, 2600411809UL, 3890163301UL, 896679896UL, 1544533015UL, 341247722
5UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL, 1459334146UL, 19
9003993UL, 3444755752UL, 8230450UL, 1378706455UL, 684191332UL, 1750733272UL
, 2842520097UL, 1631477948UL, 2591254230UL, 959644473UL, 2113375576UL, 1748
401915UL, 3452514983UL, 3892766171UL, 1227786994UL, 275473920UL, 2394613217
UL, 2091953150UL, 870094953UL, 2306851481UL, 897057645UL, 488878212UL, 8731
97214UL, 2630100528UL, 2067476907UL, 944114068UL, 2026119728UL, 115875280UL
, 2905867426UL, 248774881UL, 989201307UL, 2236032812UL, 1888510348UL, 70800
1855UL, 996960491UL, 2121706374UL, 1407967546UL, 1826568876UL, 3659618284UL
, 2614104317UL, 2931815032UL, 1055135881UL, 2537437343UL, 1858044413UL, 260
8594891UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002886UL, 24177534
41UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL, 2745477587UL,
81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 3443875720UL, 21006298
79UL, 4142998949UL, 4195414618UL, 1489846841UL, 2253433808UL, 337182869UL,
84131191UL, 4387588UL, 2641405140UL, 3525405389UL, 661876463UL, 423660319UL
, 2366546732UL, 3698878607UL, 2161119729UL, 309510684UL, 3029102089UL, 2692
507376UL, 3266869596UL, 1658012061UL, 11119541UL, 1002311379UL, 3724446882U
L, 2004188516UL, 999513506UL, 3486722046UL, 4141037460UL, 351865836UL, 4128
75013UL, 1535823315UL, 2818130700UL, 3109944987UL, 3207577548UL, 3462087941
UL, 584875517UL, 322875622UL, 3834145971UL, 1693380373UL, 3524443732UL, 934
775214UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000U
L, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 553
869152UL, 2855346376UL, 1205558328UL, 808835317UL, 3295908896UL, 470585896U
L, 2438272365UL, 4052471963UL, 683640040UL, 3043876021UL, 1588419572UL, 138
5549869UL, 3035140128UL, 1466027937UL, 18198088UL, 363815288UL, 2171386836U
L, 488404231UL, 3157371815UL, 769336092UL, 2464768302UL, 2921774554UL, 2075
839263UL, 835026995UL, 3123726486UL, 4229246330UL, 72352110UL, 3606056482UL
, 512836002UL, 2715428547UL, 319830805UL, 3869483469UL, 3160187826UL, 39029
2489UL, 980889545UL, 2966401462UL, 1385691983UL, 617042280UL, 3501667414UL,
689451808UL, 4047377762UL, 2600411809UL, 3890163301UL, 896679896UL, 154453
3015UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL,
965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1378706455UL, 51902971UL
, 1750733272UL, 2842520097UL, 1631477948UL, 2591254230UL, 426039404UL, 2113
375576UL, 1748401915UL, 3452514983UL, 3892766171UL, 2833368447UL, 275473920
UL, 2394613217UL, 2091953150UL, 870094953UL, 3524323828UL, 897057645UL, 488
878212UL, 873197214UL, 2630100528UL, 3939852929UL, 944114068UL, 2026119728U
L, 115875280UL, 2905867426UL, 3192643919UL, 989201307UL, 2236032812UL, 1888
510348UL, 708001855UL, 2166012172UL, 2121706374UL, 1407967546UL, 1826568876
UL, 3659618284UL, 135277096UL, 2931815032UL, 1055135881UL, 2537437343UL, 18
58044413UL, 2588429924UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002
886UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL,
1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 4070531
513UL, 2100629879UL, 4142998949UL, 4195414618UL, 1489846841UL, 2688068550UL
, 337182869UL, 84131191UL, 4387588UL, 2641405140UL, 1837403234UL, 661876463
UL, 423660319UL, 2366546732UL, 3698878607UL, 2916121190UL, 309510684UL, 302
9102089UL, 2692507376UL, 3266869596UL, 303422295UL, 11119541UL, 1002311379U
L, 3724446882UL, 2004188516UL, 2652711421UL, 3486722046UL, 4141037460UL, 35
1865836UL, 412875013UL, 113149471UL, 2818130700UL, 3109944987UL, 3207577548
UL, 3462087941UL, 1443140792UL, 322875622UL, 3834145971UL, 1693380373UL, 35
24443732UL, 901891935UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016
UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 11
14492412UL, 553869152UL, 2855346376UL, 1205558328UL, 808835317UL, 326662629
4UL, 470585896UL, 2438272365UL, 4052471963UL, 683640040UL, 3581539398UL, 15
88419572UL, 1385549869UL, 3035140128UL, 1466027937UL, 4075470388UL, 3638152
88UL, 2171386836UL, 488404231UL, 3157371815UL, 2759472233UL, 2464768302UL,
2921774554UL, 2075839263UL, 835026995UL, 1030654310UL, 4229246330UL, 723521
10UL, 3606056482UL, 512836002UL, 961858496UL, 319830805UL, 3869483469UL, 31
60187826UL, 390292489UL, 2366221117UL, 2966401462UL, 1385691983UL, 61704228
0UL, 3501667414UL, 295865937UL, 4047377762UL, 2600411809UL, 3890163301UL, 8
96679896UL, 21714884UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746U
L, 1012967081UL, 965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1255302
023UL, 51902971UL, 1750733272UL, 2842520097UL, 1631477948UL, 2321320272UL,
426039404UL, 2113375576UL, 1748401915UL, 3452514983UL, 2847013518UL, 283336
8447UL, 275473920UL, 2394613217UL, 2091953150UL, 1250695522UL, 3524323828UL
, 897057645UL, 488878212UL, 873197214UL, 1452317325UL, 3939852929UL, 944114
068UL, 2026119728UL, 115875280UL, 4061820350UL, 3192643919UL, 989201307UL,
2236032812UL, 1888510348UL, 3986446165UL, 2166012172UL, 2121706374UL, 14079
67546UL, 1826568876UL, 2910745432UL, 135277096UL, 2931815032UL, 1055135881U
L, 2537437343UL, 2976455307UL, 2588429924UL, 1423973935UL, 3241939420UL, 39
66440877UL, 2418897705UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940
727UL, 1321648771UL, 1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 1
7644628UL, 4070531513UL, 2100629879UL, 4142998949UL, 4195414618UL, 26973105
27UL, 2688068550UL, 337182869UL, 84131191UL, 4387588UL, 1724191700UL, 18374
03234UL, 661876463UL, 423660319UL, 2366546732UL, 693430992UL, 2916121190UL,
309510684UL, 3029102089UL, 2692507376UL, 3917396098UL, 303422295UL, 111195
41UL, 1002311379UL, 3724446882UL, 841468294UL, 2652711421UL, 3486722046UL,
4141037460UL, 351865836UL, 1733384185UL, 113149471UL, 2818130700UL, 3109944
987UL, 3207577548UL, 2326233100UL, 1443140792UL, 322875622UL, 3834145971UL,
1693380373UL, 1580706359UL, 901891935UL, 3879414752UL, 2226778032UL, 10446
09478UL, 3805470822UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348U
L, 3406548636UL, 1114492412UL, 553869152UL, 2855346376UL, 1205558328UL, 428
7831475UL, 1329654114UL, 2347235746UL, 2477803138UL, 2962371859UL, 36100242
83UL, 4197266903UL, 1162294689UL, 1746713323UL, 2815058477UL, 2152552186UL,
4214791071UL, 2382522482UL, 3713914466UL, 3974765132UL, 348354997UL, 16702
76150UL, 2173074887UL, 381736894UL, 3866219357UL, 1919366695UL, 3635118824U
L, 2298653261UL, 3534332682UL, 1627699897UL, 4168636618UL, 3787938690UL, 21
44231271UL, 2067679462UL, 217001062UL, 2308928337UL, 1620415125UL, 35265591
72UL, 749451561UL, 2456947371UL, 3543607786UL, 1893824735UL, 962598819UL, 2
332807164UL, 1691114891UL, 2543992233UL, 2914780639UL, 1610287145UL, 170059
9697UL, 3185174208UL, 552323208UL, 2367242224UL, 3797136972UL, 3415066418UL
, 2468049249UL, 1677937401UL, 40445671UL, 2886682530UL, 2585715434UL, 19493
2329UL, 2994003812UL, 3099556382UL, 680852222UL, 135838738UL, 1371063256UL,
995454898UL, 3754526418UL, 803635682UL, 634588682UL, 3869250783UL, 2442285
521UL, 1455637058UL, 570621479UL, 2512681851UL, 1220136924UL, 750260121UL,
2909903038UL, 1582019728UL, 955115170UL, 1608265445UL, 2157390890UL, 230367
8604UL, 1568394164UL, 831914289UL, 1971271392UL, 1294799854UL, 1489945167UL
, 442427880UL, 1305083700UL, 1211218668UL, 2380073713UL, 2798736785UL, 2193
524273UL, 3227386915UL, 1636588977UL, 3612937642UL, 435113647UL, 1591761830
UL, 536210039UL, 2475747073UL, 4223795480UL, 1786737271UL, 1444661534UL, 32
49410301UL, 3333695212UL, 4169107188UL, 3280638635UL, 702659930UL, 14441279
70UL, 225340755UL, 2255629368UL, 746584456UL, 3965677674UL, 2671132955UL, 2
080717656UL, 2145343886UL, 3712441197UL, 368422910UL, 1297685674UL, 4076123
901UL, 26214470UL, 2948764826UL, 40503299UL, 1198194334UL, 2100063637UL, 19
66331612UL, 2189582064UL, 2064696934UL, 1797550642UL, 3469793941UL, 2868963
812UL, 851437659UL, 240918534UL, 365060070UL, 3530600064UL, 39695324UL, 175
3898837UL, 1286976449UL, 3131971360UL, 2406485219UL, 3365373704UL, 32241134
03UL, 1651742834UL, 587601940UL, 1574206085UL, 3739575036UL, 1413669616UL,
38172232UL, 293127854UL, 4126190109UL, 1891744061UL, 787878666UL, 456643669
UL, 4228710325UL, 2025132037UL, 1492133135UL, 3122840937UL, 969442079UL, 32
72420439UL, 3836126369UL, 1877655562UL, 2766212758UL, 3867984746UL, 3348077
578UL, 1841216706UL, }, {1676507466UL, 1017841240UL, 2992644565UL, 47693615
8UL, 2468072723UL, 3113105154UL, 1154120402UL, 460889625UL, 1942263502UL, 1
761593999UL, 3020908939UL, 3078194866UL, 310971889UL, 1644896012UL, 3756044
556UL, 3549937583UL, 3710822994UL, 3554313733UL, 2174654326UL, 4251063242UL
, 2340485150UL, 950951909UL, 4288936895UL, 3744348848UL, 706644559UL, 10859
27825UL, 1595992020UL, 3288724966UL, 1367247946UL, 2950094970UL, 3925419886
UL, 2628739022UL, 2528254629UL, 3582224789UL, 3907345559UL, 3373329273UL, 4
255542251UL, 1185418446UL, 4018656113UL, 2854344020UL, 1381160022UL, 364243
8773UL, 4284399225UL, 935780030UL, 4142412144UL, 1263328494UL, 1154237693UL
, 2684443667UL, 3067549398UL, 4253090033UL, 1251034970UL, 1874233020UL, 322
2830495UL, 3866931656UL, 286048055UL, 3146635362UL, 1436483376UL, 282187649
5UL, 3927829532UL, 2648886905UL, 2142862852UL, 1368937545UL, 2647327844UL,
1072219385UL, 2621337706UL, 3543274652UL, 911792564UL, 1204178178UL, 412721
4323UL, 2821691380UL, 3101998294UL, 730811902UL, 1989156224UL, 2872353003UL
, 278290276UL, 1390223786UL, 2657819643UL, 552729795UL, 1736270535UL, 27592
07116UL, 1897013739UL, 3657020278UL, 1387364861UL, 1966588302UL, 1049203087
UL, 486446521UL, 3675999281UL, 714737345UL, 686837530UL, 85509025UL, 360908
9773UL, 2117061768UL, 3935682560UL, 3859508784UL, 4105287041UL, 1808988481U
L, 83680601UL, 1464326680UL, 1657693523UL, 3318062731UL, 1391154023UL, 2344
60119UL, 3551348221UL, 2245244809UL, 3635923821UL, 2814385745UL, 3497626257
UL, 916790795UL, 245338628UL, 2514528380UL, 3711787525UL, 2239286063UL, 105
4058916UL, 3963706010UL, 3176203796UL, 2230543409UL, 2173597546UL, 37867338
92UL, 1396036965UL, 1038764273UL, 2032556038UL, 3216540537UL, 3298170974UL,
1008892557UL, 141155464UL, 1863766055UL, 3931110690UL, 191299053UL, 201913
9711UL, 2409528317UL, 739418419UL, 1377144055UL, 2876702705UL, 3911939673UL
, 1197696462UL, 2814009721UL, 600813233UL, 1535885024UL, 1486280357UL, 3084
650548UL, 2324695947UL, 2293284974UL, 2036339249UL, 3465600153UL, 162444610
8UL, 327866771UL, 3356772175UL, 1826625240UL, 1947102360UL, 3661848193UL, 1
421374867UL, 3228945021UL, 1358646008UL, 1067180174UL, 2190741258UL, 643362
354UL, 109899594UL, 2064362635UL, 3249674888UL, 2165543887UL, 4180291913UL,
1676507466UL, 1017841240UL, 2992644565UL, 476936158UL, 3608467942UL, 31131
05154UL, 1154120402UL, 460889625UL, 1942263502UL, 1862994005UL, 3020908939U
L, 3078194866UL, 310971889UL, 1644896012UL, 693774191UL, 3549937583UL, 3710
822994UL, 3554313733UL, 2174654326UL, 37658897UL, 2340485150UL, 950951909UL
, 4288936895UL, 3744348848UL, 2258231402UL, 1085927825UL, 1595992020UL, 328
8724966UL, 1367247946UL, 3850509554UL, 3925419886UL, 2628739022UL, 25282546
29UL, 3582224789UL, 3124287811UL, 3373329273UL, 4255542251UL, 1185418446UL,
4018656113UL, 1989726178UL, 1381160022UL, 3642438773UL, 4284399225UL, 9357
80030UL, 3622052196UL, 1263328494UL, 1154237693UL, 2684443667UL, 3067549398
UL, 2786224913UL, 1251034970UL, 1874233020UL, 3222830495UL, 3866931656UL, 1
529490307UL, 3146635362UL, 1436483376UL, 2821876495UL, 3927829532UL, 979247
444UL, 2142862852UL, 1368937545UL, 2647327844UL, 1072219385UL, 294065371UL,
3543274652UL, 911792564UL, 1204178178UL, 4127214323UL, 103582737UL, 310199
8294UL, 730811902UL, 1989156224UL, 2872353003UL, 1885087777UL, 1390223786UL
, 2657819643UL, 552729795UL, 1736270535UL, 3325206451UL, 1897013739UL, 3657
020278UL, 1387364861UL, 1966588302UL, 2117065739UL, 486446521UL, 3675999281
UL, 714737345UL, 686837530UL, 3946214694UL, 3609089773UL, 2117061768UL, 393
5682560UL, 3859508784UL, 2916136885UL, 1808988481UL, 83680601UL, 1464326680
UL, 1657693523UL, 3438751781UL, 1391154023UL, 234460119UL, 3551348221UL, 22
45244809UL, 3948410079UL, 2814385745UL, 3497626257UL, 916790795UL, 24533862
8UL, 1767303496UL, 3711787525UL, 2239286063UL, 1054058916UL, 3963706010UL,
4140631909UL, 2230543409UL, 2173597546UL, 3786733892UL, 1396036965UL, 11160
33475UL, 2032556038UL, 3216540537UL, 3298170974UL, 1008892557UL, 667272562U
L, 1863766055UL, 3931110690UL, 191299053UL, 2019139711UL, 272901326UL, 7394
18419UL, 1377144055UL, 2876702705UL, 3911939673UL, 3839312742UL, 2814009721
UL, 600813233UL, 1535885024UL, 1486280357UL, 4256065219UL, 2324695947UL, 22
93284974UL, 2036339249UL, 3465600153UL, 1215859603UL, 327866771UL, 33567721
75UL, 1826625240UL, 1947102360UL, 4240407984UL, 1421374867UL, 3228945021UL,
1358646008UL, 1067180174UL, 4100357988UL, 643362354UL, 109899594UL, 206436
2635UL, 3249674888UL, 2898852084UL, 4180291913UL, 1676507466UL, 1017841240U
L, 2992644565UL, 1569683812UL, 3608467942UL, 3113105154UL, 1154120402UL, 46
0889625UL, 966040649UL, 1862994005UL, 3020908939UL, 3078194866UL, 310971889
UL, 786634113UL, 693774191UL, 3549937583UL, 3710822994UL, 3554313733UL, 157
8429713UL, 37658897UL, 2340485150UL, 950951909UL, 4288936895UL, 2528123823U
L, 2258231402UL, 1085927825UL, 1595992020UL, 3288724966UL, 3544041088UL, 38
50509554UL, 3925419886UL, 2628739022UL, 2528254629UL, 2562145937UL, 3124287
811UL, 3373329273UL, 4255542251UL, 1185418446UL, 3693565710UL, 1989726178UL
, 1381160022UL, 3642438773UL, 4284399225UL, 3271478204UL, 3622052196UL, 126
3328494UL, 1154237693UL, 2684443667UL, 3615401444UL, 2786224913UL, 12510349
70UL, 1874233020UL, 3222830495UL, 2572413057UL, 1529490307UL, 3146635362UL,
1436483376UL, 2821876495UL, 3993894153UL, 979247444UL, 2142862852UL, 13689
37545UL, 2647327844UL, 1353904396UL, 294065371UL, 3543274652UL, 911792564UL
, 1204178178UL, 3165709748UL, 103582737UL, 3101998294UL, 730811902UL, 19891
56224UL, 893293786UL, 1885087777UL, 1390223786UL, 2657819643UL, 552729795UL
, 3388458110UL, 3325206451UL, 1897013739UL, 3657020278UL, 1387364861UL, 302
5318046UL, 2117065739UL, 486446521UL, 3675999281UL, 714737345UL, 2085926890
UL, 3946214694UL, 3609089773UL, 2117061768UL, 3935682560UL, 868009118UL, 29
16136885UL, 1808988481UL, 83680601UL, 1464326680UL, 797410789UL, 3438751781
UL, 1391154023UL, 234460119UL, 3551348221UL, 4068940987UL, 3948410079UL, 28
14385745UL, 3497626257UL, 916790795UL, 3722456098UL, 1767303496UL, 37117875
25UL, 2239286063UL, 1054058916UL, 2030352819UL, 4140631909UL, 2230543409UL,
2173597546UL, 3786733892UL, 3211336683UL, 1116033475UL, 2032556038UL, 3216
540537UL, 3298170974UL, 2589589144UL, 667272562UL, 1863766055UL, 3931110690
UL, 191299053UL, 1139480458UL, 272901326UL, 739418419UL, 1377144055UL, 2876
702705UL, 1954361769UL, 3839312742UL, 2814009721UL, 600813233UL, 1535885024
UL, 3587775605UL, 4256065219UL, 2324695947UL, 2293284974UL, 2036339249UL, 1
534849280UL, 1215859603UL, 327866771UL, 3356772175UL, 1826625240UL, 7203726
69UL, 4240407984UL, 1421374867UL, 3228945021UL, 1358646008UL, 3409069246UL,
4100357988UL, 643362354UL, 109899594UL, 2064362635UL, 4243434294UL, 289885
2084UL, 4180291913UL, 1676507466UL, 1017841240UL, 3243922356UL, 1569683812U
L, 3608467942UL, 3113105154UL, 1154120402UL, 1479311403UL, 966040649UL, 186
2994005UL, 3020908939UL, 3078194866UL, 1556392996UL, 786634113UL, 693774191
UL, 3549937583UL, 3710822994UL, 920664071UL, 1578429713UL, 37658897UL, 2340
485150UL, 950951909UL, 740197415UL, 2528123823UL, 2258231402UL, 1085927825U
L, 1595992020UL, 2580760267UL, 3544041088UL, 3850509554UL, 3925419886UL, 26
28739022UL, 3867556156UL, 2562145937UL, 3124287811UL, 3373329273UL, 4255542
251UL, 3185271749UL, 3693565710UL, 1989726178UL, 1381160022UL, 3642438773UL
, 3042165367UL, 3271478204UL, 3622052196UL, 1263328494UL, 1154237693UL, 101
6814036UL, 3615401444UL, 2786224913UL, 1251034970UL, 1874233020UL, 29560869
71UL, 2572413057UL, 1529490307UL, 3146635362UL, 1436483376UL, 1513970396UL,
3993894153UL, 979247444UL, 2142862852UL, 1368937545UL, 3275665128UL, 13539
04396UL, 294065371UL, 3543274652UL, 911792564UL, 2209636872UL, 3165709748UL
, 103582737UL, 3101998294UL, 730811902UL, 965151434UL, 893293786UL, 1885087
777UL, 1390223786UL, 2657819643UL, 3278634059UL, 3388458110UL, 3325206451UL
, 1897013739UL, 3657020278UL, 4293473749UL, 3025318046UL, 2117065739UL, 486
446521UL, 3675999281UL, 620561205UL, 2085926890UL, 3946214694UL, 3609089773
UL, 2117061768UL, 163384588UL, 868009118UL, 2916136885UL, 1808988481UL, 836
80601UL, 10243015UL, 797410789UL, 3438751781UL, 1391154023UL, 234460119UL,
1278218413UL, 4068940987UL, 3948410079UL, 2814385745UL, 3497626257UL, 12332
72798UL, 3722456098UL, 1767303496UL, 3711787525UL, 2239286063UL, 3968895688
UL, 2030352819UL, 4140631909UL, 2230543409UL, 2173597546UL, 2866251044UL, 3
211336683UL, 1116033475UL, 2032556038UL, 3216540537UL, 4233849723UL, 258958
9144UL, 667272562UL, 1863766055UL, 3931110690UL, 2468422423UL, 1139480458UL
, 272901326UL, 739418419UL, 1377144055UL, 4240143411UL, 1954361769UL, 38393
12742UL, 2814009721UL, 600813233UL, 3976840004UL, 3587775605UL, 4256065219U
L, 2324695947UL, 2293284974UL, 437604123UL, 1534849280UL, 1215859603UL, 327
866771UL, 3356772175UL, 2757237699UL, 720372669UL, 4240407984UL, 1421374867
UL, 3228945021UL, 3284801305UL, 3409069246UL, 4100357988UL, 643362354UL, 10
9899594UL, 1301585321UL, 2528806870UL, 1838904064UL, 448772403UL, 109784974
0UL, 1899994097UL, 618309123UL, 1911948510UL, 2309256224UL, 1861398151UL, 9
05306403UL, 1067595802UL, 36868624UL, 3780886191UL, 835126206UL, 3190251977
UL, 2672497726UL, 2085944002UL, 2912993968UL, 2493776706UL, 667136329UL, 14
74890786UL, 2383346554UL, 943528949UL, 3376706013UL, 2495573574UL, 14495634
5UL, 793159960UL, 1591274917UL, 477107637UL, 1383815442UL, 67384899UL, 2355
242218UL, 1687409818UL, 3801093871UL, 2108217811UL, 3455908733UL, 417216079
7UL, 3935534685UL, 631067839UL, 1187677548UL, 2280856137UL, 3020767646UL, 2
063176246UL, 3736904984UL, 2952933848UL, 2975164686UL, 4144473303UL, 346709
77UL, 1250976509UL, 3484166554UL, 1532744745UL, 225700994UL, 1878713627UL,
2122358980UL, 1456610194UL, 2917522161UL, 2818947075UL, 102678939UL, 537438
58UL, 2095250656UL, 4023979225UL, 3094092874UL, 4128760696UL, 3411610028UL,
3020200609UL, 2225866341UL, 586320946UL, 63813522UL, 1238216159UL, 2825692
263UL, 2169937231UL, 3298517640UL, 1542128261UL, 2205544184UL, 1258655704UL
, 2629012083UL, 4113650203UL, 3198617867UL, 2742310794UL, 3372657381UL, 311
5904410UL, 1948638822UL, 1123521744UL, 1080429281UL, 4086706732UL, 41426932
11UL, 817377147UL, 2570194641UL, 26001503UL, 2861456160UL, 4185725555UL, 25
73003804UL, 1618628779UL, 2588489212UL, 3996192609UL, 1555844274UL, 1003123
505UL, 1326350123UL, 1130583849UL, 3017128756UL, 74119042UL, 4041266437UL,
1938014170UL, 3528465794UL, 4203969698UL, 1913054398UL, 3617979809UL, 22188
10167UL, 2453899816UL, 1997423206UL, 477446533UL, 303090065UL, 757937082UL,
1523238256UL, 3140505311UL, 1422588701UL, 3642014639UL, 1740624195UL, 1276
017154UL, 3072526193UL, 3675105122UL, 1335122682UL, 4080595263UL, 230851942
0UL, 3299182769UL, 1461978532UL, 3098694217UL, 2982399822UL, 3088698511UL,
586759229UL, 3548750902UL, 1449857891UL, 2866451663UL, 2525162286UL, 572946
02UL, 4107991297UL, 1214672265UL, 2940391280UL, 4285346034UL, 3338216759UL,
737207923UL, 4264163846UL, 59219141UL, 2300024654UL, 1876616814UL, 1976543
605UL, 783571061UL, 1724699622UL, 1967524469UL, 1650309916UL, 3322257631UL,
3975521122UL, 273342162UL, 1156754241UL, 185315896UL, 3368133921UL, 663146
55UL, 4153777915UL, 3519901897UL, }, {3672467167UL, 68684525UL, 1738833632U
L, 3081329135UL, 2583806115UL, 2291130512UL, 503032614UL, 3658059597UL, 571
493931UL, 685537959UL, 3498787788UL, 422428426UL, 3879256913UL, 1173158320U
L, 4000800121UL, 298972869UL, 1718342816UL, 2541691685UL, 2490502642UL, 232
1452806UL, 4223212804UL, 1812334632UL, 3717655725UL, 4238191852UL, 30013071
65UL, 2621896355UL, 2572404999UL, 3590094954UL, 760765206UL, 2293618001UL,
1392353032UL, 1733137169UL, 2674005018UL, 4067961151UL, 1505710487UL, 45107
8217UL, 2591688848UL, 12635611UL, 507045428UL, 694822241UL, 1789383090UL, 1
140183890UL, 1720695967UL, 1994318191UL, 3340349873UL, 2793804971UL, 105443
3135UL, 2345087879UL, 3179939285UL, 1651968615UL, 1793223686UL, 1055357758U
L, 914271617UL, 483007580UL, 2127727816UL, 2754998083UL, 3179053982UL, 5984
42002UL, 1950227301UL, 213053613UL, 3566888111UL, 2832258993UL, 4260365359U
L, 443662829UL, 1706542890UL, 3852730296UL, 3643260763UL, 2163607277UL, 181
2905006UL, 171529637UL, 215187467UL, 2369406909UL, 1929000706UL, 2572441025
UL, 2133955541UL, 810692262UL, 1337974799UL, 4030350704UL, 2159178715UL, 37
69451556UL, 1026825278UL, 593628480UL, 1817383139UL, 878832429UL, 225387635
0UL, 203612980UL, 2102950440UL, 3407143936UL, 1912362251UL, 1595387637UL, 2
827580539UL, 305467658UL, 3292706746UL, 44135525UL, 4001933553UL, 369734308
9UL, 760470915UL, 587414402UL, 1419378814UL, 2852774010UL, 3891626781UL, 27
57016765UL, 1090707384UL, 3997074427UL, 1047182100UL, 2855539022UL, 3622915
9UL, 1591415533UL, 3471572739UL, 1237952140UL, 2614469314UL, 213338525UL, 8
86212578UL, 2620301943UL, 713590207UL, 2430496777UL, 1198164420UL, 26448416
98UL, 3654164701UL, 36283572UL, 1461695896UL, 1770331341UL, 1641501876UL, 3
470919184UL, 3181021559UL, 3053795110UL, 3533531372UL, 3134337355UL, 668308
383UL, 388340999UL, 3221275220UL, 1589659138UL, 294382235UL, 1447443579UL,
690177534UL, 1799726917UL, 2838977761UL, 4172949119UL, 2360858031UL, 159385
920UL, 2248389027UL, 1790015671UL, 3925738275UL, 1049918544UL, 4107349511UL
, 1619955951UL, 4188275966UL, 1672572975UL, 2672697497UL, 1863413666UL, 747
724021UL, 4037561738UL, 1605940213UL, 445253292UL, 3362434828UL, 610898209U
L, 1473244091UL, 735444769UL, 1540599852UL, 2449351720UL, 1032410949UL, 367
2467167UL, 68684525UL, 1738833632UL, 3081329135UL, 519684794UL, 2291130512U
L, 503032614UL, 3658059597UL, 571493931UL, 2400186105UL, 3498787788UL, 4224
28426UL, 3879256913UL, 1173158320UL, 4120704752UL, 298972869UL, 1718342816U
L, 2541691685UL, 2490502642UL, 1686027891UL, 4223212804UL, 1812334632UL, 37
17655725UL, 4238191852UL, 642431972UL, 2621896355UL, 2572404999UL, 35900949
54UL, 760765206UL, 2949609717UL, 1392353032UL, 1733137169UL, 2674005018UL,
4067961151UL, 1526077846UL, 451078217UL, 2591688848UL, 12635611UL, 50704542
8UL, 2417951415UL, 1789383090UL, 1140183890UL, 1720695967UL, 1994318191UL,
3465605863UL, 2793804971UL, 1054433135UL, 2345087879UL, 3179939285UL, 30792
97626UL, 1793223686UL, 1055357758UL, 914271617UL, 483007580UL, 306802527UL,
2754998083UL, 3179053982UL, 598442002UL, 1950227301UL, 2473418737UL, 35668
88111UL, 2832258993UL, 4260365359UL, 443662829UL, 2097776414UL, 3852730296U
L, 3643260763UL, 2163607277UL, 1812905006UL, 3957721904UL, 215187467UL, 236
9406909UL, 1929000706UL, 2572441025UL, 3779486126UL, 810692262UL, 133797479
9UL, 4030350704UL, 2159178715UL, 1127012865UL, 1026825278UL, 593628480UL, 1
817383139UL, 878832429UL, 361018423UL, 203612980UL, 2102950440UL, 340714393
6UL, 1912362251UL, 1475218277UL, 2827580539UL, 305467658UL, 3292706746UL, 4
4135525UL, 1900092336UL, 3697343089UL, 760470915UL, 587414402UL, 1419378814
UL, 343303227UL, 3891626781UL, 2757016765UL, 1090707384UL, 3997074427UL, 74
5490961UL, 2855539022UL, 36229159UL, 1591415533UL, 3471572739UL, 3920625546
UL, 2614469314UL, 213338525UL, 886212578UL, 2620301943UL, 827771411UL, 2430
496777UL, 1198164420UL, 2644841698UL, 3654164701UL, 2747674190UL, 146169589
6UL, 1770331341UL, 1641501876UL, 3470919184UL, 919857376UL, 3053795110UL, 3
533531372UL, 3134337355UL, 668308383UL, 201138876UL, 3221275220UL, 15896591
38UL, 294382235UL, 1447443579UL, 4211579707UL, 1799726917UL, 2838977761UL,
4172949119UL, 2360858031UL, 416103844UL, 2248389027UL, 1790015671UL, 392573
8275UL, 1049918544UL, 3481887924UL, 1619955951UL, 4188275966UL, 1672572975U
L, 2672697497UL, 564854400UL, 747724021UL, 4037561738UL, 1605940213UL, 4452
53292UL, 604900912UL, 610898209UL, 1473244091UL, 735444769UL, 1540599852UL,
3036173307UL, 1032410949UL, 3672467167UL, 68684525UL, 1738833632UL, 973022
696UL, 519684794UL, 2291130512UL, 503032614UL, 3658059597UL, 1500301452UL,
2400186105UL, 3498787788UL, 422428426UL, 3879256913UL, 3923611748UL, 412070
4752UL, 298972869UL, 1718342816UL, 2541691685UL, 2323881484UL, 1686027891UL
, 4223212804UL, 1812334632UL, 3717655725UL, 2109094458UL, 642431972UL, 2621
896355UL, 2572404999UL, 3590094954UL, 1837882537UL, 2949609717UL, 139235303
2UL, 1733137169UL, 2674005018UL, 3252348987UL, 1526077846UL, 451078217UL, 2
591688848UL, 12635611UL, 3971261781UL, 2417951415UL, 1789383090UL, 11401838
90UL, 1720695967UL, 2906966040UL, 3465605863UL, 2793804971UL, 1054433135UL,
2345087879UL, 915518921UL, 3079297626UL, 1793223686UL, 1055357758UL, 91427
1617UL, 791633499UL, 306802527UL, 2754998083UL, 3179053982UL, 598442002UL,
324402573UL, 2473418737UL, 3566888111UL, 2832258993UL, 4260365359UL, 216804
6398UL, 2097776414UL, 3852730296UL, 3643260763UL, 2163607277UL, 2595175979U
L, 3957721904UL, 215187467UL, 2369406909UL, 1929000706UL, 657446369UL, 3779
486126UL, 810692262UL, 1337974799UL, 4030350704UL, 1865557469UL, 1127012865
UL, 1026825278UL, 593628480UL, 1817383139UL, 3414354529UL, 361018423UL, 203
612980UL, 2102950440UL, 3407143936UL, 1739372987UL, 1475218277UL, 282758053
9UL, 305467658UL, 3292706746UL, 825045562UL, 1900092336UL, 3697343089UL, 76
0470915UL, 587414402UL, 2000637694UL, 343303227UL, 3891626781UL, 2757016765
UL, 1090707384UL, 4015377800UL, 745490961UL, 2855539022UL, 36229159UL, 1591
415533UL, 2208656873UL, 3920625546UL, 2614469314UL, 213338525UL, 886212578U
L, 2729976209UL, 827771411UL, 2430496777UL, 1198164420UL, 2644841698UL, 192
2667440UL, 2747674190UL, 1461695896UL, 1770331341UL, 1641501876UL, 35753531
1UL, 919857376UL, 3053795110UL, 3533531372UL, 3134337355UL, 1004072597UL, 2
01138876UL, 3221275220UL, 1589659138UL, 294382235UL, 1148950143UL, 42115797
07UL, 1799726917UL, 2838977761UL, 4172949119UL, 892664404UL, 416103844UL, 2
248389027UL, 1790015671UL, 3925738275UL, 2612357890UL, 3481887924UL, 161995
5951UL, 4188275966UL, 1672572975UL, 2005534713UL, 564854400UL, 747724021UL,
4037561738UL, 1605940213UL, 2620990454UL, 604900912UL, 610898209UL, 147324
4091UL, 735444769UL, 3571225334UL, 3036173307UL, 1032410949UL, 3672467167UL
, 68684525UL, 3327351604UL, 973022696UL, 519684794UL, 2291130512UL, 5030326
14UL, 3814902238UL, 1500301452UL, 2400186105UL, 3498787788UL, 422428426UL,
1756753750UL, 3923611748UL, 4120704752UL, 298972869UL, 1718342816UL, 652903
081UL, 2323881484UL, 1686027891UL, 4223212804UL, 1812334632UL, 1599640566UL
, 2109094458UL, 642431972UL, 2621896355UL, 2572404999UL, 1668409355UL, 1837
882537UL, 2949609717UL, 1392353032UL, 1733137169UL, 3691709793UL, 325234898
7UL, 1526077846UL, 451078217UL, 2591688848UL, 3353622601UL, 3971261781UL, 2
417951415UL, 1789383090UL, 1140183890UL, 4113853791UL, 2906966040UL, 346560
5863UL, 2793804971UL, 1054433135UL, 2195882948UL, 915518921UL, 3079297626UL
, 1793223686UL, 1055357758UL, 898713552UL, 791633499UL, 306802527UL, 275499
8083UL, 3179053982UL, 2469350088UL, 324402573UL, 2473418737UL, 3566888111UL
, 2832258993UL, 1377718274UL, 2168046398UL, 2097776414UL, 3852730296UL, 364
3260763UL, 3492388484UL, 2595175979UL, 3957721904UL, 215187467UL, 236940690
9UL, 4243449339UL, 657446369UL, 3779486126UL, 810692262UL, 1337974799UL, 39
60230785UL, 1865557469UL, 1127012865UL, 1026825278UL, 593628480UL, 73279331
2UL, 3414354529UL, 361018423UL, 203612980UL, 2102950440UL, 2401792405UL, 17
39372987UL, 1475218277UL, 2827580539UL, 305467658UL, 2454275289UL, 82504556
2UL, 1900092336UL, 3697343089UL, 760470915UL, 2146882409UL, 2000637694UL, 3
43303227UL, 3891626781UL, 2757016765UL, 3997473261UL, 4015377800UL, 7454909
61UL, 2855539022UL, 36229159UL, 2375394427UL, 2208656873UL, 3920625546UL, 2
614469314UL, 213338525UL, 2055366274UL, 2729976209UL, 827771411UL, 24304967
77UL, 1198164420UL, 1789631187UL, 1922667440UL, 2747674190UL, 1461695896UL,
1770331341UL, 4284442852UL, 357535311UL, 919857376UL, 3053795110UL, 353353
1372UL, 2124270060UL, 1004072597UL, 201138876UL, 3221275220UL, 1589659138UL
, 1418386120UL, 1148950143UL, 4211579707UL, 1799726917UL, 2838977761UL, 354
0708069UL, 892664404UL, 416103844UL, 2248389027UL, 1790015671UL, 3936883UL,
2612357890UL, 3481887924UL, 1619955951UL, 4188275966UL, 2963623483UL, 2005
534713UL, 564854400UL, 747724021UL, 4037561738UL, 3431155922UL, 2620990454U
L, 604900912UL, 610898209UL, 1473244091UL, 3880001339UL, 2879060316UL, 3300
897679UL, 3960972039UL, 3201086624UL, 3814462934UL, 3426650044UL, 193088163
2UL, 1981178788UL, 2956279691UL, 4272406256UL, 372705521UL, 1359389771UL, 1
590302979UL, 3940206208UL, 3817999127UL, 2527835456UL, 2739078164UL, 716997
849UL, 3235607043UL, 2550297745UL, 3688700200UL, 354502605UL, 2285793656UL,
2339138034UL, 3912354142UL, 2262255668UL, 469322622UL, 1319943359UL, 19161
01235UL, 200441823UL, 509436982UL, 2160284593UL, 1687919695UL, 4153615582UL
, 495735041UL, 3694469424UL, 2086893117UL, 4223008799UL, 105344742UL, 16980
33424UL, 1149223145UL, 4183918790UL, 4176151950UL, 415739351UL, 817762972UL
, 3768072560UL, 1931430949UL, 2698979439UL, 3481477932UL, 1994322914UL, 407
8299950UL, 1268233995UL, 3254069145UL, 91029129UL, 498234704UL, 1636613942U
L, 3710087092UL, 3876816560UL, 3510446387UL, 3870169008UL, 1370156410UL, 24
42498047UL, 2324396523UL, 1258730334UL, 621954739UL, 1053015373UL, 49182071
7UL, 3386515432UL, 2203703266UL, 120167176UL, 2383669740UL, 1038666440UL, 2
927342870UL, 3583197824UL, 1236241846UL, 2474675929UL, 679052891UL, 2451259
584UL, 2177706146UL, 606842882UL, 3546980104UL, 2289281509UL, 353873434UL,
2041926837UL, 1238346748UL, 2729109726UL, 2843938395UL, 2938124210UL, 25544
43866UL, 1494477920UL, 693378319UL, 2020963566UL, 2000385949UL, 3744098787U
L, 650307220UL, 2631327075UL, 1529128757UL, 595871428UL, 3206666562UL, 4580
62987UL, 875238192UL, 3729317374UL, 1368843921UL, 3478430230UL, 3234384578U
L, 3232435428UL, 321359326UL, 994274524UL, 361184397UL, 4285497594UL, 91526
3578UL, 1486882838UL, 9988613UL, 829077170UL, 677216046UL, 4141828204UL, 16
5804609UL, 1086678519UL, 2933434608UL, 1351662802UL, 2640085040UL, 26115029
32UL, 2033698714UL, 2008873254UL, 3995557835UL, 1020873906UL, 67873555UL, 2
230337823UL, 1263800417UL, 1148712155UL, 3985159589UL, 2979503513UL, 285471
4997UL, 1539343345UL, 2751484352UL, 1569100732UL, 2020758949UL, 2126757134U
L, 3426641899UL, 2808587825UL, 1953320148UL, 1096398464UL, 1502907172UL, 37
51230087UL, 765557661UL, 765290990UL, 3056075500UL, 2040620632UL, 422573751
UL, 3613558930UL, 1741145769UL, 273531216UL, 837238736UL, 494297893UL, 2903
251124UL, 1636782182UL, 4256592784UL, 3652746656UL, 4258393217UL, }, | | {850664906UL, 2293210629UL, 1517805917UL, 1215500405UL, 1612415445UL, 64538
8200UL, 824349799UL, 3517232886UL, 4075591755UL, 3089899292UL, 4249786064UL
, 3811424903UL, 1100783479UL, 53649761UL, 2817264826UL, 3159462529UL, 16548
48550UL, 950025444UL, 3095510002UL, 4080567211UL, 4111078399UL, 3241719305U
L, 2788212779UL, 4256963770UL, 2426893717UL, 4190211142UL, 1420776905UL, 37
80537969UL, 1102912875UL, 1657948873UL, 3354905256UL, 2519610308UL, 5157776
63UL, 3396785394UL, 1832603711UL, 1154211550UL, 1915690212UL, 1933919046UL,
789578337UL, 337961173UL, 1359089498UL, 2249086205UL, 3417955173UL, 862571
348UL, 528120760UL, 1265685672UL, 1970052076UL, 3585976752UL, 3645339918UL,
312171257UL, 1360991400UL, 1994321680UL, 2327168468UL, 2540437053UL, 11804
83641UL, 2217962701UL, 182726833UL, 590204372UL, 1904496495UL, 2545607041UL
, 3697978033UL, 1084030545UL, 3397906968UL, 2192325323UL, 2704204176UL, 106
9092002UL, 2364406907UL, 1578647245UL, 3561974633UL, 3437665426UL, 14641273
05UL, 1616628807UL, 2243114101UL, 3639967880UL, 1702613633UL, 2437350057UL,
39991274UL, 2024323584UL, 3795072940UL, 3604530798UL, 443099203UL, 6435362
12UL, 1919517328UL, 3931285769UL, 427935569UL, 276421624UL, 2492081750UL, 2
62729512UL, 3088549877UL, 2922650665UL, 1816283755UL, 4246096489UL, 8425759
14UL, 1460435650UL, 3050522190UL, 2640849794UL, 3697925816UL, 3465779075UL,
3856929655UL, 1365559780UL, 2897029415UL, 2747033756UL, 3611830629UL, 1891
542518UL, 1897590206UL, 437451803UL, 677924906UL, 123809117UL, 3940574372UL
, 687640291UL, 3488484529UL, 470218446UL, 1092571016UL, 1537938503UL, 10733
23937UL, 611300083UL, 3809285994UL, 3975678726UL, 925845389UL, 2514775760UL
, 2859302390UL, 2761919483UL, 993285307UL, 164095287UL, 3736193671UL, 20789
46336UL, 1418537059UL, 1202525920UL, 4234029440UL, 1313593624UL, 2484428922
UL, 1833969372UL, 661495122UL, 2217907395UL, 2795045321UL, 2950835531UL, 14
02379354UL, 351314168UL, 1902476749UL, 1914974334UL, 2873973176UL, 13212036
03UL, 3316118265UL, 3282193947UL, 1342191737UL, 793441242UL, 3281524559UL,
296088733UL, 487851702UL, 712098215UL, 1388727135UL, 1705533557UL, 35578002
92UL, 399729516UL, 1355829467UL, 291276309UL, 421164833UL, 1318404599UL, 20
64519128UL, 1161612642UL, 2076623594UL, 850664906UL, 2293210629UL, 15178059
17UL, 1215500405UL, 3847487204UL, 645388200UL, 824349799UL, 3517232886UL, 4
075591755UL, 2755872609UL, 4249786064UL, 3811424903UL, 1100783479UL, 536497
61UL, 1417544262UL, 3159462529UL, 1654848550UL, 950025444UL, 3095510002UL,
1908900347UL, 4111078399UL, 3241719305UL, 2788212779UL, 4256963770UL, 37502
58343UL, 4190211142UL, 1420776905UL, 3780537969UL, 1102912875UL, 1690550UL,
3354905256UL, 2519610308UL, 515777663UL, 3396785394UL, 2658162202UL, 11542
11550UL, 1915690212UL, 1933919046UL, 789578337UL, 189880016UL, 1359089498UL
, 2249086205UL, 3417955173UL, 862571348UL, 998719835UL, 1265685672UL, 19700
52076UL, 3585976752UL, 3645339918UL, 2973042959UL, 1360991400UL, 1994321680
UL, 2327168468UL, 2540437053UL, 2283905032UL, 2217962701UL, 182726833UL, 59
0204372UL, 1904496495UL, 110719262UL, 3697978033UL, 1084030545UL, 339790696
8UL, 2192325323UL, 4133333579UL, 1069092002UL, 2364406907UL, 1578647245UL,
3561974633UL, 3629845331UL, 1464127305UL, 1616628807UL, 2243114101UL, 36399
67880UL, 3256744141UL, 2437350057UL, 39991274UL, 2024323584UL, 3795072940UL
, 1024703328UL, 443099203UL, 643536212UL, 1919517328UL, 3931285769UL, 27551
67056UL, 276421624UL, 2492081750UL, 262729512UL, 3088549877UL, 2817867653UL
, 1816283755UL, 4246096489UL, 842575914UL, 1460435650UL, 2276077438UL, 2640
849794UL, 3697925816UL, 3465779075UL, 3856929655UL, 130551477UL, 2897029415
UL, 2747033756UL, 3611830629UL, 1891542518UL, 804565809UL, 437451803UL, 677
924906UL, 123809117UL, 3940574372UL, 2446610749UL, 3488484529UL, 470218446U
L, 1092571016UL, 1537938503UL, 1502147484UL, 611300083UL, 3809285994UL, 397
5678726UL, 925845389UL, 872826112UL, 2859302390UL, 2761919483UL, 993285307U
L, 164095287UL, 3901654538UL, 2078946336UL, 1418537059UL, 1202525920UL, 423
4029440UL, 704759480UL, 2484428922UL, 1833969372UL, 661495122UL, 2217907395
UL, 3287413716UL, 2950835531UL, 1402379354UL, 351314168UL, 1902476749UL, 20
33316109UL, 2873973176UL, 1321203603UL, 3316118265UL, 3282193947UL, 1316780
684UL, 793441242UL, 3281524559UL, 296088733UL, 487851702UL, 314311643UL, 13
88727135UL, 1705533557UL, 3557800292UL, 399729516UL, 1660074989UL, 29127630
9UL, 421164833UL, 1318404599UL, 2064519128UL, 3156334112UL, 2076623594UL, 8
50664906UL, 2293210629UL, 1517805917UL, 335452425UL, 3847487204UL, 64538820
0UL, 824349799UL, 3517232886UL, 954487767UL, 2755872609UL, 4249786064UL, 38
11424903UL, 1100783479UL, 3408594583UL, 1417544262UL, 3159462529UL, 1654848
550UL, 950025444UL, 324339737UL, 1908900347UL, 4111078399UL, 3241719305UL,
2788212779UL, 1890540205UL, 3750258343UL, 4190211142UL, 1420776905UL, 37805
37969UL, 3716648585UL, 1690550UL, 3354905256UL, 2519610308UL, 515777663UL,
3758156132UL, 2658162202UL, 1154211550UL, 1915690212UL, 1933919046UL, 84414
9171UL, 189880016UL, 1359089498UL, 2249086205UL, 3417955173UL, 1031812215UL
, 998719835UL, 1265685672UL, 1970052076UL, 3585976752UL, 3174204115UL, 2973
042959UL, 1360991400UL, 1994321680UL, 2327168468UL, 714016907UL, 2283905032
UL, 2217962701UL, 182726833UL, 590204372UL, 2151450260UL, 110719262UL, 3697
978033UL, 1084030545UL, 3397906968UL, 767772303UL, 4133333579UL, 1069092002
UL, 2364406907UL, 1578647245UL, 42955292UL, 3629845331UL, 1464127305UL, 161
6628807UL, 2243114101UL, 3222189776UL, 3256744141UL, 2437350057UL, 39991274
UL, 2024323584UL, 3142424684UL, 1024703328UL, 443099203UL, 643536212UL, 191
9517328UL, 918511196UL, 2755167056UL, 276421624UL, 2492081750UL, 262729512U
L, 4246877536UL, 2817867653UL, 1816283755UL, 4246096489UL, 842575914UL, 142
5765936UL, 2276077438UL, 2640849794UL, 3697925816UL, 3465779075UL, 14917025
26UL, 130551477UL, 2897029415UL, 2747033756UL, 3611830629UL, 1844578694UL,
804565809UL, 437451803UL, 677924906UL, 123809117UL, 3419189841UL, 244661074
9UL, 3488484529UL, 470218446UL, 1092571016UL, 3272535988UL, 1502147484UL, 6
11300083UL, 3809285994UL, 3975678726UL, 2853681168UL, 872826112UL, 28593023
90UL, 2761919483UL, 993285307UL, 1434560128UL, 3901654538UL, 2078946336UL,
1418537059UL, 1202525920UL, 2530097881UL, 704759480UL, 2484428922UL, 183396
9372UL, 661495122UL, 503878844UL, 3287413716UL, 2950835531UL, 1402379354UL,
351314168UL, 4131886119UL, 2033316109UL, 2873973176UL, 1321203603UL, 33161
18265UL, 237900321UL, 1316780684UL, 793441242UL, 3281524559UL, 296088733UL,
1730738847UL, 314311643UL, 1388727135UL, 1705533557UL, 3557800292UL, 15538
35665UL, 1660074989UL, 291276309UL, 421164833UL, 1318404599UL, 964731488UL,
3156334112UL, 2076623594UL, 850664906UL, 2293210629UL, 1105350579UL, 33545
2425UL, 3847487204UL, 645388200UL, 824349799UL, 2789953706UL, 954487767UL,
2755872609UL, 4249786064UL, 3811424903UL, 3937839949UL, 3408594583UL, 14175
44262UL, 3159462529UL, 1654848550UL, 624060530UL, 324339737UL, 1908900347UL
, 4111078399UL, 3241719305UL, 2294919498UL, 1890540205UL, 3750258343UL, 419
0211142UL, 1420776905UL, 2279133729UL, 3716648585UL, 1690550UL, 3354905256U
L, 2519610308UL, 3563975602UL, 3758156132UL, 2658162202UL, 1154211550UL, 19
15690212UL, 3505586122UL, 844149171UL, 189880016UL, 1359089498UL, 224908620
5UL, 2389487504UL, 1031812215UL, 998719835UL, 1265685672UL, 1970052076UL, 2
798611919UL, 3174204115UL, 2973042959UL, 1360991400UL, 1994321680UL, 168413
4678UL, 714016907UL, 2283905032UL, 2217962701UL, 182726833UL, 1734988742UL,
2151450260UL, 110719262UL, 3697978033UL, 1084030545UL, 159906818UL, 767772
303UL, 4133333579UL, 1069092002UL, 2364406907UL, 1290801202UL, 42955292UL,
3629845331UL, 1464127305UL, 1616628807UL, 987794861UL, 3222189776UL, 325674
4141UL, 2437350057UL, 39991274UL, 3644076751UL, 3142424684UL, 1024703328UL,
443099203UL, 643536212UL, 1487589384UL, 918511196UL, 2755167056UL, 2764216
24UL, 2492081750UL, 137688638UL, 4246877536UL, 2817867653UL, 1816283755UL,
4246096489UL, 1518475380UL, 1425765936UL, 2276077438UL, 2640849794UL, 36979
25816UL, 4226506771UL, 1491702526UL, 130551477UL, 2897029415UL, 2747033756U
L, 2033599579UL, 1844578694UL, 804565809UL, 437451803UL, 677924906UL, 27490
65512UL, 3419189841UL, 2446610749UL, 3488484529UL, 470218446UL, 290444026UL
, 3272535988UL, 1502147484UL, 611300083UL, 3809285994UL, 2546040767UL, 2853
681168UL, 872826112UL, 2859302390UL, 2761919483UL, 4097961150UL, 1434560128
UL, 3901654538UL, 2078946336UL, 1418537059UL, 2725734455UL, 2530097881UL, 7
04759480UL, 2484428922UL, 1833969372UL, 3999408333UL, 503878844UL, 32874137
16UL, 2950835531UL, 1402379354UL, 3861442503UL, 4131886119UL, 2033316109UL,
2873973176UL, 1321203603UL, 1267331405UL, 237900321UL, 1316780684UL, 79344
1242UL, 3281524559UL, 1273427916UL, 1730738847UL, 314311643UL, 1388727135UL
, 1705533557UL, 1474310231UL, 1553835665UL, 1660074989UL, 291276309UL, 4211
64833UL, 3884815658UL, 3088049345UL, 3307042227UL, 3228948601UL, 1717605083
UL, 1864502063UL, 3799516572UL, 2372822470UL, 2691586476UL, 1172840854UL, 1
577099080UL, 870101866UL, 2139291021UL, 406996656UL, 255568268UL, 897760202
UL, 674745664UL, 885214361UL, 3753233375UL, 3015215223UL, 1711461259UL, 324
1363282UL, 2125360928UL, 2493601640UL, 2350228245UL, 3434627328UL, 20956429
63UL, 3360932494UL, 3287396242UL, 4070512427UL, 3415702664UL, 1958354224UL,
3280206940UL, 3929504236UL, 3390499817UL, 4144225735UL, 3621750606UL, 3205
006592UL, 3495743785UL, 269239326UL, 2181299371UL, 2898796651UL, 2613623219
UL, 3988711298UL, 2162437858UL, 949553433UL, 3289670000UL, 3559525307UL, 33
66925567UL, 2112148665UL, 955626393UL, 1790865381UL, 699223558UL, 388958430
1UL, 1020750250UL, 4105283899UL, 2295851818UL, 4045668915UL, 2224770025UL,
766386910UL, 4265157386UL, 89139307UL, 2099710177UL, 1012450874UL, 18754924
46UL, 1927399417UL, 767450812UL, 654474783UL, 4265293038UL, 4041215389UL, 4
102336947UL, 4263617328UL, 2135826340UL, 2317231535UL, 3773895729UL, 403151
111UL, 1400693138UL, 4255050194UL, 755369466UL, 2325764302UL, 2617301159UL,
4165707294UL, 1206304709UL, 2415645397UL, 4276004841UL, 1457022279UL, 6626
60652UL, 795140282UL, 828519889UL, 805830562UL, 1179976369UL, 2212548232UL,
755708248UL, 1034682071UL, 899950902UL, 1906046264UL, 1861009040UL, 310711
525UL, 920739741UL, 2322414272UL, 3179236470UL, 81822135UL, 4111390320UL, 1
800166783UL, 112253014UL, 688771939UL, 1050990794UL, 3124647483UL, 28705217
1UL, 1363630156UL, 3447798279UL, 1405733552UL, 3075862538UL, 1682808202UL,
1595154222UL, 1173705692UL, 680713285UL, 2748212230UL, 568610527UL, 3434965
538UL, 1114942930UL, 2835858745UL, 2575992250UL, 3243355150UL, 2127580225UL
, 1855934450UL, 3915941751UL, 2228679809UL, 1514780124UL, 1506688039UL, 103
3083295UL, 793807083UL, 1120681149UL, 4105670165UL, 3999570340UL, 208302013
1UL, 1213356023UL, 3684882757UL, 3375797774UL, 3577986103UL, 2092046164UL,
2593847443UL, 1826450612UL, 367828409UL, 3198272513UL, 1941316667UL, 943707
510UL, 907134807UL, 2020457947UL, 1462193665UL, 2964617539UL, 4216491663UL,
2625270800UL, 2395371467UL, 3691003028UL, 3659016793UL, 2381847054UL, 3513
105567UL, 3013019506UL, 2731245927UL, }, {1680024716UL, 2112340059UL, 33874
75367UL, 2080916186UL, 1431532386UL, 3907378472UL, 2636491350UL, 2176128529
UL, 2236616671UL, 3736851460UL, 2604001339UL, 3893075234UL, 3495918635UL, 4
116370522UL, 1384310379UL, 3660102574UL, 2030233939UL, 2759207091UL, 493479
23UL, 97526506UL, 2566932710UL, 1566181275UL, 3127827248UL, 578401670UL, 14
99229308UL, 2581732444UL, 279715551UL, 809690877UL, 1438444015UL, 878935323
UL, 1495277039UL, 3417305339UL, 2858903785UL, 3074075088UL, 603749086UL, 23
70669734UL, 391683868UL, 3933465331UL, 2884128106UL, 1478317876UL, 18649883
35UL, 2925823809UL, 4133578805UL, 218104493UL, 368652174UL, 1998600344UL, 1
109346044UL, 1716435313UL, 415435111UL, 91393686UL, 2536620737UL, 144006857
3UL, 481874870UL, 142128108UL, 988825519UL, 2077118779UL, 2858045339UL, 406
8162251UL, 115593872UL, 1364244587UL, 3550167006UL, 3728768059UL, 177242368
5UL, 2504624145UL, 248732306UL, 1412607307UL, 4081166331UL, 154438218UL, 16
52901877UL, 3932533490UL, 3142799969UL, 3154073676UL, 3112018078UL, 2757873
595UL, 2364830126UL, 2855791484UL, 793851407UL, 507785167UL, 263713916UL, 4
060700051UL, 3291978358UL, 1584226715UL, 2546417990UL, 450747961UL, 2951067
700UL, 2706009093UL, 1788578194UL, 4030171132UL, 2610979903UL, 573420740UL,
4269115622UL, 2180305819UL, 2646894726UL, 716649335UL, 3875715683UL, 85342
8184UL, 2436760738UL, 4190071217UL, 2754423535UL, 540698101UL, 4082489821UL
, 741976046UL, 267559495UL, 1591532642UL, 2500610323UL, 3203248679UL, 14731
2102UL, 2772368222UL, 1412987047UL, 2295185573UL, 1932341300UL, 898396308UL
, 1837129999UL, 3113914292UL, 2613354524UL, 3141601915UL, 276087167UL, 1887
389351UL, 757801450UL, 3752353732UL, 2745818074UL, 1442953464UL, 3802648347
UL, 223728071UL, 2169947402UL, 1338125300UL, 3642174036UL, 2794462634UL, 23
26349851UL, 862746036UL, 3577092599UL, 627103363UL, 552173564UL, 4142604459
UL, 2310329406UL, 583522272UL, 189323282UL, 1217612313UL, 73550248UL, 24346
92829UL, 2757269706UL, 2392210091UL, 3032922600UL, 3573904125UL, 2897178037
UL, 2632631469UL, 3085332665UL, 3775619904UL, 2563291734UL, 1351375865UL, 4
043427793UL, 1803743084UL, 3112116579UL, 522940594UL, 2690374983UL, 2613871
529UL, 3810037031UL, 1765642390UL, 534554747UL, 1930852049UL, 2264349344UL,
1680024716UL, 2112340059UL, 3387475367UL, 2080916186UL, 75966494UL, 390737
8472UL, 2636491350UL, 2176128529UL, 2236616671UL, 2372987046UL, 2604001339U
L, 3893075234UL, 3495918635UL, 4116370522UL, 534929913UL, 3660102574UL, 203
0233939UL, 2759207091UL, 49347923UL, 987575186UL, 2566932710UL, 1566181275U
L, 3127827248UL, 578401670UL, 3731513754UL, 2581732444UL, 279715551UL, 8096
90877UL, 1438444015UL, 2185866850UL, 1495277039UL, 3417305339UL, 2858903785
UL, 3074075088UL, 4198538376UL, 2370669734UL, 391683868UL, 3933465331UL, 28
84128106UL, 1400216510UL, 1864988335UL, 2925823809UL, 4133578805UL, 2181044
93UL, 2798390374UL, 1998600344UL, 1109346044UL, 1716435313UL, 415435111UL,
1892535124UL, 2536620737UL, 1440068573UL, 481874870UL, 142128108UL, 3290827
40UL, 2077118779UL, 2858045339UL, 4068162251UL, 115593872UL, 2644000449UL,
3550167006UL, 3728768059UL, 1772423685UL, 2504624145UL, 2140118619UL, 14126
07307UL, 4081166331UL, 154438218UL, 1652901877UL, 3804911318UL, 3142799969U
L, 3154073676UL, 3112018078UL, 2757873595UL, 50297646UL, 2855791484UL, 7938
51407UL, 507785167UL, 263713916UL, 3324588195UL, 3291978358UL, 1584226715UL
, 2546417990UL, 450747961UL, 3455625012UL, 2706009093UL, 1788578194UL, 4030
171132UL, 2610979903UL, 3835380965UL, 4269115622UL, 2180305819UL, 264689472
6UL, 716649335UL, 2607142354UL, 853428184UL, 2436760738UL, 4190071217UL, 27
54423535UL, 456808691UL, 4082489821UL, 741976046UL, 267559495UL, 1591532642
UL, 2722205042UL, 3203248679UL, 147312102UL, 2772368222UL, 1412987047UL, 19
50543946UL, 1932341300UL, 898396308UL, 1837129999UL, 3113914292UL, 42861639
2UL, 3141601915UL, 276087167UL, 1887389351UL, 757801450UL, 963534966UL, 274
5818074UL, 1442953464UL, 3802648347UL, 223728071UL, 229039300UL, 1338125300
UL, 3642174036UL, 2794462634UL, 2326349851UL, 206115203UL, 3577092599UL, 62
7103363UL, 552173564UL, 4142604459UL, 1492461846UL, 583522272UL, 189323282U
L, 1217612313UL, 73550248UL, 3552211807UL, 2757269706UL, 2392210091UL, 3032
922600UL, 3573904125UL, 810640644UL, 2632631469UL, 3085332665UL, 3775619904
UL, 2563291734UL, 922608790UL, 4043427793UL, 1803743084UL, 3112116579UL, 52
2940594UL, 1785093944UL, 2613871529UL, 3810037031UL, 1765642390UL, 53455474
7UL, 3528050076UL, 2264349344UL, 1680024716UL, 2112340059UL, 3387475367UL,
3295682653UL, 75966494UL, 3907378472UL, 2636491350UL, 2176128529UL, 3574915
532UL, 2372987046UL, 2604001339UL, 3893075234UL, 3495918635UL, 1280296085UL
, 534929913UL, 3660102574UL, 2030233939UL, 2759207091UL, 299776535UL, 98757
5186UL, 2566932710UL, 1566181275UL, 3127827248UL, 3874691533UL, 3731513754U
L, 2581732444UL, 279715551UL, 809690877UL, 3100791084UL, 2185866850UL, 1495
277039UL, 3417305339UL, 2858903785UL, 1310351481UL, 4198538376UL, 237066973
4UL, 391683868UL, 3933465331UL, 2749085130UL, 1400216510UL, 1864988335UL, 2
925823809UL, 4133578805UL, 3352814594UL, 2798390374UL, 1998600344UL, 110934
6044UL, 1716435313UL, 1571752941UL, 1892535124UL, 2536620737UL, 1440068573U
L, 481874870UL, 2485033697UL, 329082740UL, 2077118779UL, 2858045339UL, 4068
162251UL, 3837440666UL, 2644000449UL, 3550167006UL, 3728768059UL, 177242368
5UL, 1176559812UL, 2140118619UL, 1412607307UL, 4081166331UL, 154438218UL, 2
902622972UL, 3804911318UL, 3142799969UL, 3154073676UL, 3112018078UL, 240339
1233UL, 50297646UL, 2855791484UL, 793851407UL, 507785167UL, 2351826747UL, 3
324588195UL, 3291978358UL, 1584226715UL, 2546417990UL, 746876926UL, 3455625
012UL, 2706009093UL, 1788578194UL, 4030171132UL, 3779307353UL, 3835380965UL
, 4269115622UL, 2180305819UL, 2646894726UL, 2602235234UL, 2607142354UL, 853
428184UL, 2436760738UL, 4190071217UL, 2066757692UL, 456808691UL, 4082489821
UL, 741976046UL, 267559495UL, 3001080633UL, 2722205042UL, 3203248679UL, 147
312102UL, 2772368222UL, 89950260UL, 1950543946UL, 1932341300UL, 898396308UL
, 1837129999UL, 947911286UL, 428616392UL, 3141601915UL, 276087167UL, 188738
9351UL, 2583987247UL, 963534966UL, 2745818074UL, 1442953464UL, 3802648347UL
, 4229124441UL, 229039300UL, 1338125300UL, 3642174036UL, 2794462634UL, 2472
155633UL, 206115203UL, 3577092599UL, 627103363UL, 552173564UL, 2586882739UL
, 1492461846UL, 583522272UL, 189323282UL, 1217612313UL, 3501549884UL, 35522
11807UL, 2757269706UL, 2392210091UL, 3032922600UL, 740675778UL, 810640644UL
, 2632631469UL, 3085332665UL, 3775619904UL, 3643289881UL, 922608790UL, 4043
427793UL, 1803743084UL, 3112116579UL, 2213337398UL, 1785093944UL, 261387152
9UL, 3810037031UL, 1765642390UL, 762472016UL, 3528050076UL, 2264349344UL, 1
680024716UL, 2112340059UL, 1372272974UL, 3295682653UL, 75966494UL, 39073784
72UL, 2636491350UL, 3117471955UL, 3574915532UL, 2372987046UL, 2604001339UL,
3893075234UL, 915576383UL, 1280296085UL, 534929913UL, 3660102574UL, 203023
3939UL, 346368350UL, 299776535UL, 987575186UL, 2566932710UL, 1566181275UL,
3535223896UL, 3874691533UL, 3731513754UL, 2581732444UL, 279715551UL, 245689
4951UL, 3100791084UL, 2185866850UL, 1495277039UL, 3417305339UL, 1618871086U
L, 1310351481UL, 4198538376UL, 2370669734UL, 391683868UL, 2009676005UL, 274
9085130UL, 1400216510UL, 1864988335UL, 2925823809UL, 58955107UL, 3352814594
UL, 2798390374UL, 1998600344UL, 1109346044UL, 3273979614UL, 1571752941UL, 1
892535124UL, 2536620737UL, 1440068573UL, 1174168447UL, 2485033697UL, 329082
740UL, 2077118779UL, 2858045339UL, 4062921629UL, 3837440666UL, 2644000449UL
, 3550167006UL, 3728768059UL, 2642133401UL, 1176559812UL, 2140118619UL, 141
2607307UL, 4081166331UL, 3124905304UL, 2902622972UL, 3804911318UL, 31427999
69UL, 3154073676UL, 1449454613UL, 2403391233UL, 50297646UL, 2855791484UL, 7
93851407UL, 3514201526UL, 2351826747UL, 3324588195UL, 3291978358UL, 1584226
715UL, 3636681672UL, 746876926UL, 3455625012UL, 2706009093UL, 1788578194UL,
3451519459UL, 3779307353UL, 3835380965UL, 4269115622UL, 2180305819UL, 3987
989524UL, 2602235234UL, 2607142354UL, 853428184UL, 2436760738UL, 2151617107
UL, 2066757692UL, 456808691UL, 4082489821UL, 741976046UL, 3590081269UL, 300
1080633UL, 2722205042UL, 3203248679UL, 147312102UL, 3432947806UL, 89950260U
L, 1950543946UL, 1932341300UL, 898396308UL, 3828432864UL, 947911286UL, 4286
16392UL, 3141601915UL, 276087167UL, 2517666433UL, 2583987247UL, 963534966UL
, 2745818074UL, 1442953464UL, 2223986807UL, 4229124441UL, 229039300UL, 1338
125300UL, 3642174036UL, 1053796945UL, 2472155633UL, 206115203UL, 3577092599
UL, 627103363UL, 1113276084UL, 2586882739UL, 1492461846UL, 583522272UL, 189
323282UL, 1490604990UL, 3501549884UL, 3552211807UL, 2757269706UL, 239221009
1UL, 3545407532UL, 740675778UL, 810640644UL, 2632631469UL, 3085332665UL, 75
5862267UL, 3643289881UL, 922608790UL, 4043427793UL, 1803743084UL, 195416663
0UL, 2213337398UL, 1785093944UL, 2613871529UL, 3810037031UL, 3042935707UL,
3162182177UL, 2791346436UL, 1901925289UL, 863100941UL, 3367519168UL, 197262
3238UL, 3664303070UL, 604922059UL, 3026817982UL, 1436412310UL, 4096180631UL
, 1597561857UL, 4206212303UL, 4127914332UL, 3228677359UL, 3985733659UL, 359
7290113UL, 4251197894UL, 3451370603UL, 609679338UL, 3360835257UL, 137223988
5UL, 638572328UL, 3806422284UL, 3974147336UL, 1804280837UL, 4209089291UL, 2
021797469UL, 3557188838UL, 409727186UL, 2114649178UL, 687702120UL, 25424459
92UL, 1235991799UL, 460479179UL, 2008348175UL, 887884478UL, 3942327811UL, 2
999928223UL, 4171339789UL, 2286339235UL, 1293442231UL, 1575942850UL, 761224
75UL, 1440527701UL, 2006558403UL, 1544148172UL, 895899367UL, 681826913UL, 4
094701935UL, 3995413790UL, 1027509154UL, 2264990896UL, 1938238113UL, 213430
250UL, 222469320UL, 609726517UL, 3581538106UL, 492802663UL, 120480843UL, 17
20004062UL, 1132674507UL, 911082758UL, 2909148131UL, 566658805UL, 396411444
5UL, 3483602509UL, 1793438750UL, 165562604UL, 3641830063UL, 2394205521UL, 3
404874822UL, 1672998096UL, 916151953UL, 1141264477UL, 3171661340UL, 3803396
219UL, 3018337382UL, 1863902683UL, 2474641928UL, 3250365071UL, 3897886220UL
, 1219701051UL, 51332576UL, 1358614881UL, 1707407492UL, 3670647816UL, 92335
7625UL, 343687395UL, 3991339686UL, 3913575403UL, 1267727936UL, 4001357856UL
, 3820224848UL, 2942896724UL, 3505936742UL, 1403285299UL, 1992762049UL, 567
748449UL, 2202721585UL, 2781324216UL, 1724850068UL, 2408314541UL, 307397581
3UL, 3992810029UL, 2475242354UL, 540562053UL, 2185198943UL, 3759352041UL, 3
373885614UL, 1132999410UL, 1097554565UL, 4089342358UL, 3239542922UL, 245174
8646UL, 407290679UL, 3188103200UL, 1708016248UL, 26848241UL, 2796711130UL,
3090711568UL, 4068389322UL, 3420916085UL, 3137567033UL, 2877819818UL, 22133
454UL, 4629160UL, 3703695249UL, 1920151708UL, 1175452162UL, 130015299UL, 33
31834713UL, 1099225384UL, 689254331UL, 1851083761UL, 2654970209UL, 32592979
36UL, 3742819314UL, 3524284766UL, 2291819083UL, 3494031861UL, 16242889UL, 3
545082774UL, 1997878108UL, 777447699UL, 4244916543UL, 3508640253UL, 3782278
393UL, 2107258964UL, 2139074576UL, 1383217899UL, 2337934322UL, 3181899620UL
, 1285955765UL, 2989610020UL, 3326862146UL, 1168587380UL, 801203532UL, 3020
809957UL, }, {3810471203UL, 1017064446UL, 1595207573UL, 441087832UL, 332674
6890UL, 3294064431UL, 167972517UL, 3625210015UL, 1011845006UL, 2980240819UL
, 1778354660UL, 3041730987UL, 1598611350UL, 2015169745UL, 2321724978UL, 339
0812967UL, 2432904511UL, 113261909UL, 3957193232UL, 3806115908UL, 296582892
9UL, 2035392295UL, 3500116619UL, 2881232416UL, 1672212265UL, 1607201428UL,
425148945UL, 1262591961UL, 2221781268UL, 4215047456UL, 2148245850UL, 278748
8981UL, 1077262192UL, 2085467561UL, 3053954888UL, 3584435116UL, 3013084787U
L, 287099941UL, 1290407232UL, 4078552287UL, 2658945475UL, 4251530898UL, 240
3086478UL, 2884923598UL, 3545110453UL, 4105390090UL, 343200643UL, 318988882
1UL, 4086304363UL, 3466483195UL, 259435633UL, 2846377387UL, 497258846UL, 27
2775541UL, 985737911UL, 2957688879UL, 2180784344UL, 3434619542UL, 364338483
8UL, 2228652440UL, 3107480718UL, 2208729807UL, 596436263UL, 3255120711UL, 3
248886970UL, 519242965UL, 602979109UL, 1619614UL, 1391563565UL, 56262588UL,
1584463910UL, 1849038201UL, 728022295UL, 848624947UL, 1813827408UL, 428214
945UL, 1246345586UL, 4213351865UL, 168985863UL, 456608054UL, 4277869380UL,
3886828599UL, 2264054549UL, 3110967170UL, 3138175314UL, 2649164828UL, 33693
78320UL, 3648350039UL, 3524848759UL, 1468470706UL, 3558859222UL, 2669673235
UL, 831851874UL, 4285651092UL, 4224147373UL, 1088456706UL, 231954609UL, 311
8005852UL, 225508069UL, 883105389UL, 856371341UL, 2001356578UL, 639336670UL
, 2363501707UL, 3622399552UL, 4024065226UL, 1093546838UL, 4263608561UL, 185
2072422UL, 425195042UL, 2441102396UL, 296426333UL, 384641750UL, 3559334435U
L, 1757327033UL, 1016016207UL, 3595686646UL, 24777793UL, 623926105UL, 21691
95923UL, 1779396793UL, 646997837UL, 1459728476UL, 2644865980UL, 1994581089U
L, 3956278544UL, 919592580UL, 2153558858UL, 2029633394UL, 3837501009UL, 401
6560170UL, 484838096UL, 3652199054UL, 1971790561UL, 605295089UL, 637470291U
L, 278970544UL, 3574824693UL, 295866521UL, 1755035156UL, 2542341803UL, 1588
716357UL, 1502596918UL, 4124554133UL, 3547049843UL, 1768033045UL, 153173463
0UL, 101448323UL, 3233017580UL, 1793222944UL, 3187853500UL, 186000900UL, 80
3444571UL, 2820254958UL, 2009384608UL, 2384668855UL, 2222812920UL, 63360866
5UL, 2028480056UL, 1258028235UL, 545095949UL, 3810471203UL, 1017064446UL, 1
595207573UL, 441087832UL, 899068662UL, 3294064431UL, 167972517UL, 362521001
5UL, 1011845006UL, 3951305793UL, 1778354660UL, 3041730987UL, 1598611350UL,
2015169745UL, 1885149424UL, 3390812967UL, 2432904511UL, 113261909UL, 395719
3232UL, 3953443155UL, 2965828929UL, 2035392295UL, 3500116619UL, 2881232416U
L, 329153573UL, 1607201428UL, 425148945UL, 1262591961UL, 2221781268UL, 7802
8761UL, 2148245850UL, 2787488981UL, 1077262192UL, 2085467561UL, 647235899UL
, 3584435116UL, 3013084787UL, 287099941UL, 1290407232UL, 1467385694UL, 2658
945475UL, 4251530898UL, 2403086478UL, 2884923598UL, 3489351040UL, 410539009
0UL, 343200643UL, 3189888821UL, 4086304363UL, 3521512280UL, 259435633UL, 28
46377387UL, 497258846UL, 272775541UL, 1367093111UL, 2957688879UL, 218078434
4UL, 3434619542UL, 3643384838UL, 411877686UL, 3107480718UL, 2208729807UL, 5
96436263UL, 3255120711UL, 584605030UL, 519242965UL, 602979109UL, 1619614UL,
1391563565UL, 3902518209UL, 1584463910UL, 1849038201UL, 728022295UL, 84862
4947UL, 1932969318UL, 428214945UL, 1246345586UL, 4213351865UL, 168985863UL,
2770345237UL, 4277869380UL, 3886828599UL, 2264054549UL, 3110967170UL, 2953
581033UL, 2649164828UL, 3369378320UL, 3648350039UL, 3524848759UL, 238035397
7UL, 3558859222UL, 2669673235UL, 831851874UL, 4285651092UL, 1214052447UL, 1
088456706UL, 231954609UL, 3118005852UL, 225508069UL, 1766983646UL, 85637134
1UL, 2001356578UL, 639336670UL, 2363501707UL, 1782816591UL, 4024065226UL, 1
093546838UL, 4263608561UL, 1852072422UL, 1149716600UL, 2441102396UL, 296426
333UL, 384641750UL, 3559334435UL, 2391309970UL, 1016016207UL, 3595686646UL,
24777793UL, 623926105UL, 362098678UL, 1779396793UL, 646997837UL, 145972847
6UL, 2644865980UL, 3238673748UL, 3956278544UL, 919592580UL, 2153558858UL, 2
029633394UL, 115778559UL, 4016560170UL, 484838096UL, 3652199054UL, 19717905
61UL, 737357475UL, 637470291UL, 278970544UL, 3574824693UL, 295866521UL, 398
9745853UL, 2542341803UL, 1588716357UL, 1502596918UL, 4124554133UL, 30168497
44UL, 1768033045UL, 1531734630UL, 101448323UL, 3233017580UL, 4157527581UL,
3187853500UL, 186000900UL, 803444571UL, 2820254958UL, 1980528062UL, 2384668
855UL, 2222812920UL, 633608665UL, 2028480056UL, 3166710281UL, 545095949UL,
3810471203UL, 1017064446UL, 1595207573UL, 693962828UL, 899068662UL, 3294064
431UL, 167972517UL, 3625210015UL, 1486040398UL, 3951305793UL, 1778354660UL,
3041730987UL, 1598611350UL, 2859363132UL, 1885149424UL, 3390812967UL, 2432
904511UL, 113261909UL, 664880478UL, 3953443155UL, 2965828929UL, 2035392295U
L, 3500116619UL, 558081801UL, 329153573UL, 1607201428UL, 425148945UL, 12625
91961UL, 3716247699UL, 78028761UL, 2148245850UL, 2787488981UL, 1077262192UL
, 4206362947UL, 647235899UL, 3584435116UL, 3013084787UL, 287099941UL, 25367
81098UL, 1467385694UL, 2658945475UL, 4251530898UL, 2403086478UL, 3075072413
UL, 3489351040UL, 4105390090UL, 343200643UL, 3189888821UL, 2540485172UL, 35
21512280UL, 259435633UL, 2846377387UL, 497258846UL, 2442427327UL, 136709311
1UL, 2957688879UL, 2180784344UL, 3434619542UL, 1593967423UL, 411877686UL, 3
107480718UL, 2208729807UL, 596436263UL, 1048686529UL, 584605030UL, 51924296
5UL, 602979109UL, 1619614UL, 2072745381UL, 3902518209UL, 1584463910UL, 1849
038201UL, 728022295UL, 846033949UL, 1932969318UL, 428214945UL, 1246345586UL
, 4213351865UL, 1066373275UL, 2770345237UL, 4277869380UL, 3886828599UL, 226
4054549UL, 1877859690UL, 2953581033UL, 2649164828UL, 3369378320UL, 36483500
39UL, 2537763389UL, 2380353977UL, 3558859222UL, 2669673235UL, 831851874UL,
522748140UL, 1214052447UL, 1088456706UL, 231954609UL, 3118005852UL, 1381269
315UL, 1766983646UL, 856371341UL, 2001356578UL, 639336670UL, 667275675UL, 1
782816591UL, 4024065226UL, 1093546838UL, 4263608561UL, 2057337961UL, 114971
6600UL, 2441102396UL, 296426333UL, 384641750UL, 340523210UL, 2391309970UL,
1016016207UL, 3595686646UL, 24777793UL, 3094832341UL, 362098678UL, 17793967
93UL, 646997837UL, 1459728476UL, 1169681568UL, 3238673748UL, 3956278544UL,
919592580UL, 2153558858UL, 388335108UL, 115778559UL, 4016560170UL, 48483809
6UL, 3652199054UL, 1764858181UL, 737357475UL, 637470291UL, 278970544UL, 357
4824693UL, 3671458900UL, 3989745853UL, 2542341803UL, 1588716357UL, 15025969
18UL, 2102871406UL, 3016849744UL, 1768033045UL, 1531734630UL, 101448323UL,
3964942332UL, 4157527581UL, 3187853500UL, 186000900UL, 803444571UL, 3425652
083UL, 1980528062UL, 2384668855UL, 2222812920UL, 633608665UL, 3035373876UL,
3166710281UL, 545095949UL, 3810471203UL, 1017064446UL, 669282349UL, 693962
828UL, 899068662UL, 3294064431UL, 167972517UL, 2007256988UL, 1486040398UL,
3951305793UL, 1778354660UL, 3041730987UL, 2827768941UL, 2859363132UL, 18851
49424UL, 3390812967UL, 2432904511UL, 3700915653UL, 664880478UL, 3953443155U
L, 2965828929UL, 2035392295UL, 1461208330UL, 558081801UL, 329153573UL, 1607
201428UL, 425148945UL, 1700881129UL, 3716247699UL, 78028761UL, 2148245850UL
, 2787488981UL, 2706775080UL, 4206362947UL, 647235899UL, 3584435116UL, 3013
084787UL, 2958545221UL, 2536781098UL, 1467385694UL, 2658945475UL, 425153089
8UL, 2241012567UL, 3075072413UL, 3489351040UL, 4105390090UL, 343200643UL, 4
90164649UL, 2540485172UL, 3521512280UL, 259435633UL, 2846377387UL, 40736118
31UL, 2442427327UL, 1367093111UL, 2957688879UL, 2180784344UL, 1835510773UL,
1593967423UL, 411877686UL, 3107480718UL, 2208729807UL, 3306732468UL, 10486
86529UL, 584605030UL, 519242965UL, 602979109UL, 2978864605UL, 2072745381UL,
3902518209UL, 1584463910UL, 1849038201UL, 3284115169UL, 846033949UL, 19329
69318UL, 428214945UL, 1246345586UL, 194166002UL, 1066373275UL, 2770345237UL
, 4277869380UL, 3886828599UL, 1874087886UL, 1877859690UL, 2953581033UL, 264
9164828UL, 3369378320UL, 4145454028UL, 2537763389UL, 2380353977UL, 35588592
22UL, 2669673235UL, 739345884UL, 522748140UL, 1214052447UL, 1088456706UL, 2
31954609UL, 3605603781UL, 1381269315UL, 1766983646UL, 856371341UL, 20013565
78UL, 2049940324UL, 667275675UL, 1782816591UL, 4024065226UL, 1093546838UL,
152524382UL, 2057337961UL, 1149716600UL, 2441102396UL, 296426333UL, 3195130
788UL, 340523210UL, 2391309970UL, 1016016207UL, 3595686646UL, 180492441UL,
3094832341UL, 362098678UL, 1779396793UL, 646997837UL, 2458167607UL, 1169681
568UL, 3238673748UL, 3956278544UL, 919592580UL, 3421005218UL, 388335108UL,
115778559UL, 4016560170UL, 484838096UL, 2649676374UL, 1764858181UL, 7373574
75UL, 637470291UL, 278970544UL, 2236401278UL, 3671458900UL, 3989745853UL, 2
542341803UL, 1588716357UL, 1241570134UL, 2102871406UL, 3016849744UL, 176803
3045UL, 1531734630UL, 1765654724UL, 3964942332UL, 4157527581UL, 3187853500U
L, 186000900UL, 2189716659UL, 3425652083UL, 1980528062UL, 2384668855UL, 222
2812920UL, 3955466207UL, 2426547616UL, 3846752458UL, 3015538636UL, 23425933
65UL, 3613176865UL, 3484860981UL, 4278370194UL, 1979143878UL, 1159739458UL,
3714038404UL, 396530346UL, 3276617756UL, 3293940597UL, 4050183149UL, 14185
71985UL, 402563753UL, 2702853013UL, 2289900621UL, 2267058511UL, 3482161995U
L, 3375026019UL, 1988640267UL, 3674438074UL, 4124612310UL, 1057883705UL, 43
4730475UL, 3210959778UL, 4102029739UL, 2140938750UL, 3176753074UL, 23569715
12UL, 3969685288UL, 1556275580UL, 2648433428UL, 3959375381UL, 478841344UL,
1496991528UL, 3309714981UL, 569990368UL, 3660587501UL, 2550379574UL, 117751
9842UL, 2652707373UL, 543943404UL, 1912551128UL, 2278132032UL, 1484596780UL
, 3570913985UL, 2982401320UL, 1413776035UL, 3177275459UL, 3036211597UL, 109
1740466UL, 3448424311UL, 1445187645UL, 3205024875UL, 3135795254UL, 82373872
9UL, 3742134467UL, 4066657438UL, 1226311678UL, 2403605393UL, 537573634UL, 3
457409768UL, 1940233423UL, 1761431281UL, 1129427309UL, 2443661283UL, 320081
4257UL, 4094866249UL, 2666869754UL, 604785127UL, 2213464116UL, 3002782918UL
, 468024929UL, 2490681314UL, 3666681384UL, 1583346053UL, 3049668798UL, 3592
153237UL, 2573082448UL, 3082970021UL, 1461796708UL, 832526980UL, 3728763274
UL, 355291229UL, 4029588456UL, 832358279UL, 2125298737UL, 3681181038UL, 324
5535160UL, 1333342738UL, 1868897492UL, 446790068UL, 1278093154UL, 209011861
5UL, 4158925515UL, 4062165914UL, 822726809UL, 1154960183UL, 286518382UL, 11
70424276UL, 2554691236UL, 3674133415UL, 2765714969UL, 2330865375UL, 1908307
334UL, 3537287082UL, 410252600UL, 3977128218UL, 424210327UL, 2919071615UL,
2715518134UL, 64568844UL, 480972649UL, 2488797168UL, 1302817038UL, 22139952
65UL, 4229997295UL, 2200797852UL, 109368057UL, 3033807022UL, 1907400078UL,
645977948UL, 1410909090UL, 3700787906UL, 3375062371UL, 629087832UL, 1344281
719UL, 4249981139UL, 3457543297UL, 1218556849UL, 864222854UL, 1458445945UL,
914545469UL, 3451164212UL, 1088025757UL, 1129933985UL, 953788883UL, 240617
2924UL, 170364546UL, 3505490646UL, 1027553899UL, 2864067776UL, 436854871UL,
1342782209UL, 761167471UL, 2660173631UL, 4159507498UL, 4172028400UL, 24422
54644UL, 2110123720UL, 2315991253UL, 873066601UL, 1725470559UL, 3831299052U
L, 678672031UL, 1585431329UL, 3495750550UL, }, {1998393432UL, 2665389278UL,
3989307699UL, 3267631636UL, 3861682977UL, 3243522970UL, 1243992413UL, 2200
497260UL, 3821883021UL, 4187123083UL, 3451270040UL, 3044132745UL, 210128724
9UL, 2340839784UL, 227040990UL, 1724350416UL, 3228881240UL, 3123386528UL, 4
279362126UL, 3098224464UL, 2635534069UL, 3622906431UL, 206207480UL, 1894245
533UL, 2152374527UL, 1011223653UL, 7271757UL, 2972858087UL, 207942127UL, 33
55362797UL, 2593296740UL, 174093751UL, 3713822176UL, 4212355586UL, 33356052
24UL, 1171716408UL, 2867257989UL, 1522213957UL, 2016192462UL, 4229688395UL,
2174928148UL, 1468226225UL, 3938290338UL, 493240317UL, 3229423344UL, 25854
75729UL, 3112454413UL, 1881171707UL, 2555908056UL, 1997546352UL, 380428329U
L, 3341885423UL, 3307510279UL, 3519476676UL, 3613100811UL, 2555826262UL, 10
9341943UL, 2382715395UL, 3883409616UL, 1593551879UL, 2163678014UL, 33797831
37UL, 2810374300UL, 1516064864UL, 561144874UL, 316017838UL, 1899237567UL, 7
0857401UL, 3435185465UL, 4234661323UL, 2580352177UL, 32879620UL, 4171670150
UL, 1986234067UL, 3589478191UL, 2073132526UL, 2603712175UL, 377997975UL, 24
74419397UL, 3110698341UL, 812664089UL, 1778922726UL, 1686111212UL, 97278413
8UL, 3936486236UL, 2711468739UL, 423435866UL, 1661961159UL, 802312780UL, 18
68728136UL, 1760295704UL, 3357409828UL, 215039860UL, 683184627UL, 401911106
4UL, 3609261689UL, 2167554309UL, 1831085281UL, 3389357802UL, 4193421575UL,
628277197UL, 2900207619UL, 993609502UL, 3429627083UL, 2636466084UL, 3652352
199UL, 1780133580UL, 1670387713UL, 4086070210UL, 4004540729UL, 783029246UL,
2165667566UL, 1739001057UL, 377639972UL, 1102689625UL, 1945278055UL, 39411
85940UL, 3685368326UL, 1881761572UL, 2201338934UL, 801752UL, 2729497735UL,
492844690UL, 2998826141UL, 3844964457UL, 3679088359UL, 2196391660UL, 422226
9404UL, 357321611UL, 3727170055UL, 1819614072UL, 2348798457UL, 4294366646UL
, 1952884323UL, 3574345216UL, 2040734807UL, 232392443UL, 4183498179UL, 2614
866055UL, 112120292UL, 3624018350UL, 3340709877UL, 3097507723UL, 1268833488
UL, 3570501956UL, 3338260086UL, 293812421UL, 3683058169UL, 1147960351UL, 28
3731890UL, 2171233479UL, 1830154455UL, 4036602681UL, 1996981699UL, 13280383
4UL, 40256165UL, 2158110401UL, 3575159090UL, 3196553513UL, 3559872992UL, 34
02884675UL, 1998393432UL, 2665389278UL, 3989307699UL, 3267631636UL, 3617519
767UL, 3243522970UL, 1243992413UL, 2200497260UL, 3821883021UL, 3715729085UL
, 3451270040UL, 3044132745UL, 2101287249UL, 2340839784UL, 3173635549UL, 172
4350416UL, 3228881240UL, 3123386528UL, 4279362126UL, 2287520039UL, 26355340
69UL, 3622906431UL, 206207480UL, 1894245533UL, 96723416UL, 1011223653UL, 72
71757UL, 2972858087UL, 207942127UL, 1668335352UL, 2593296740UL, 174093751UL
, 3713822176UL, 4212355586UL, 49226793UL, 1171716408UL, 2867257989UL, 15222
13957UL, 2016192462UL, 118712412UL, 2174928148UL, 1468226225UL, 3938290338U
L, 493240317UL, 3788174304UL, 2585475729UL, 3112454413UL, 1881171707UL, 255
5908056UL, 3351139844UL, 380428329UL, 3341885423UL, 3307510279UL, 351947667
6UL, 1368994724UL, 2555826262UL, 109341943UL, 2382715395UL, 3883409616UL, 1
561509458UL, 2163678014UL, 3379783137UL, 2810374300UL, 1516064864UL, 231325
2274UL, 316017838UL, 1899237567UL, 70857401UL, 3435185465UL, 2585770746UL,
2580352177UL, 32879620UL, 4171670150UL, 1986234067UL, 3317983509UL, 2073132
526UL, 2603712175UL, 377997975UL, 2474419397UL, 908728599UL, 812664089UL, 1
778922726UL, 1686111212UL, 972784138UL, 1992540005UL, 2711468739UL, 4234358
66UL, 1661961159UL, 802312780UL, 907108769UL, 1760295704UL, 3357409828UL, 2
15039860UL, 683184627UL, 2806826652UL, 3609261689UL, 2167554309UL, 18310852
81UL, 3389357802UL, 2755692689UL, 628277197UL, 2900207619UL, 993609502UL, 3
429627083UL, 3605915742UL, 3652352199UL, 1780133580UL, 1670387713UL, 408607
0210UL, 3717326627UL, 783029246UL, 2165667566UL, 1739001057UL, 377639972UL,
2355216626UL, 1945278055UL, 3941185940UL, 3685368326UL, 1881761572UL, 4024
097818UL, 801752UL, 2729497735UL, 492844690UL, 2998826141UL, 2719601647UL,
3679088359UL, 2196391660UL, 4222269404UL, 357321611UL, 1319821972UL, 181961
4072UL, 2348798457UL, 4294366646UL, 1952884323UL, 3573866689UL, 2040734807U
L, 232392443UL, 4183498179UL, 2614866055UL, 440744432UL, 3624018350UL, 3340
709877UL, 3097507723UL, 1268833488UL, 224895395UL, 3338260086UL, 293812421U
L, 3683058169UL, 1147960351UL, 3433425235UL, 2171233479UL, 1830154455UL, 40
36602681UL, 1996981699UL, 2875889721UL, 40256165UL, 2158110401UL, 357515909
0UL, 3196553513UL, 1094082574UL, 3402884675UL, 1998393432UL, 2665389278UL,
3989307699UL, 4068940467UL, 3617519767UL, 3243522970UL, 1243992413UL, 22004
97260UL, 441678457UL, 3715729085UL, 3451270040UL, 3044132745UL, 2101287249U
L, 2181502237UL, 3173635549UL, 1724350416UL, 3228881240UL, 3123386528UL, 19
68352124UL, 2287520039UL, 2635534069UL, 3622906431UL, 206207480UL, 20650935
99UL, 96723416UL, 1011223653UL, 7271757UL, 2972858087UL, 1094044749UL, 1668
335352UL, 2593296740UL, 174093751UL, 3713822176UL, 2887397643UL, 49226793UL
, 1171716408UL, 2867257989UL, 1522213957UL, 984348433UL, 118712412UL, 21749
28148UL, 1468226225UL, 3938290338UL, 2279430036UL, 3788174304UL, 2585475729
UL, 3112454413UL, 1881171707UL, 4247636500UL, 3351139844UL, 380428329UL, 33
41885423UL, 3307510279UL, 2887754196UL, 1368994724UL, 2555826262UL, 1093419
43UL, 2382715395UL, 2836761616UL, 1561509458UL, 2163678014UL, 3379783137UL,
2810374300UL, 1635278016UL, 2313252274UL, 316017838UL, 1899237567UL, 70857
401UL, 3481535811UL, 2585770746UL, 2580352177UL, 32879620UL, 4171670150UL,
2248003250UL, 3317983509UL, 2073132526UL, 2603712175UL, 377997975UL, 328616
2818UL, 908728599UL, 812664089UL, 1778922726UL, 1686111212UL, 4024815755UL,
1992540005UL, 2711468739UL, 423435866UL, 1661961159UL, 2257259057UL, 90710
8769UL, 1760295704UL, 3357409828UL, 215039860UL, 3917391198UL, 2806826652UL
, 3609261689UL, 2167554309UL, 1831085281UL, 4238043113UL, 2755692689UL, 628
277197UL, 2900207619UL, 993609502UL, 2036092353UL, 3605915742UL, 3652352199
UL, 1780133580UL, 1670387713UL, 118446953UL, 3717326627UL, 783029246UL, 216
5667566UL, 1739001057UL, 203160626UL, 2355216626UL, 1945278055UL, 394118594
0UL, 3685368326UL, 546361979UL, 4024097818UL, 801752UL, 2729497735UL, 49284
4690UL, 1023017124UL, 2719601647UL, 3679088359UL, 2196391660UL, 4222269404U
L, 621859651UL, 1319821972UL, 1819614072UL, 2348798457UL, 4294366646UL, 111
4888560UL, 3573866689UL, 2040734807UL, 232392443UL, 4183498179UL, 395950460
9UL, 440744432UL, 3624018350UL, 3340709877UL, 3097507723UL, 3613295037UL, 2
24895395UL, 3338260086UL, 293812421UL, 3683058169UL, 1655305863UL, 34334252
35UL, 2171233479UL, 1830154455UL, 4036602681UL, 3731384097UL, 2875889721UL,
40256165UL, 2158110401UL, 3575159090UL, 1847744924UL, 1094082574UL, 340288
4675UL, 1998393432UL, 2665389278UL, 3781866777UL, 4068940467UL, 3617519767U
L, 3243522970UL, 1243992413UL, 2723708256UL, 441678457UL, 3715729085UL, 345
1270040UL, 3044132745UL, 4013832842UL, 2181502237UL, 3173635549UL, 17243504
16UL, 3228881240UL, 2092292494UL, 1968352124UL, 2287520039UL, 2635534069UL,
3622906431UL, 3186333458UL, 2065093599UL, 96723416UL, 1011223653UL, 727175
7UL, 649658033UL, 1094044749UL, 1668335352UL, 2593296740UL, 174093751UL, 41
59420309UL, 2887397643UL, 49226793UL, 1171716408UL, 2867257989UL, 259007795
3UL, 984348433UL, 118712412UL, 2174928148UL, 1468226225UL, 1065322711UL, 22
79430036UL, 3788174304UL, 2585475729UL, 3112454413UL, 3932517386UL, 4247636
500UL, 3351139844UL, 380428329UL, 3341885423UL, 1285273904UL, 2887754196UL,
1368994724UL, 2555826262UL, 109341943UL, 2318470582UL, 2836761616UL, 15615
09458UL, 2163678014UL, 3379783137UL, 674658583UL, 1635278016UL, 2313252274U
L, 316017838UL, 1899237567UL, 2192372173UL, 3481535811UL, 2585770746UL, 258
0352177UL, 32879620UL, 300323274UL, 2248003250UL, 3317983509UL, 2073132526U
L, 2603712175UL, 3086543917UL, 3286162818UL, 908728599UL, 812664089UL, 1778
922726UL, 2263290659UL, 4024815755UL, 1992540005UL, 2711468739UL, 423435866
UL, 819027349UL, 2257259057UL, 907108769UL, 1760295704UL, 3357409828UL, 114
2221093UL, 3917391198UL, 2806826652UL, 3609261689UL, 2167554309UL, 41081558
75UL, 4238043113UL, 2755692689UL, 628277197UL, 2900207619UL, 3041719497UL,
2036092353UL, 3605915742UL, 3652352199UL, 1780133580UL, 2397410862UL, 11844
6953UL, 3717326627UL, 783029246UL, 2165667566UL, 2721690354UL, 203160626UL,
2355216626UL, 1945278055UL, 3941185940UL, 2768842108UL, 546361979UL, 40240
97818UL, 801752UL, 2729497735UL, 4045063232UL, 1023017124UL, 2719601647UL,
3679088359UL, 2196391660UL, 2666107451UL, 621859651UL, 1319821972UL, 181961
4072UL, 2348798457UL, 3555102623UL, 1114888560UL, 3573866689UL, 2040734807U
L, 232392443UL, 3359040541UL, 3959504609UL, 440744432UL, 3624018350UL, 3340
709877UL, 1477919696UL, 3613295037UL, 224895395UL, 3338260086UL, 293812421U
L, 4210187101UL, 1655305863UL, 3433425235UL, 2171233479UL, 1830154455UL, 41
50241150UL, 3731384097UL, 2875889721UL, 40256165UL, 2158110401UL, 335024668
7UL, 455561037UL, 2250400255UL, 3192153445UL, 3258870230UL, 1500391873UL, 4
142878334UL, 1155955691UL, 1483275844UL, 4189436981UL, 323745948UL, 1976017
426UL, 2804626790UL, 2717553615UL, 2315409034UL, 954508235UL, 3845175920UL,
3999878682UL, 1247696432UL, 1743319509UL, 2998248398UL, 3694350012UL, 4072
006361UL, 191306987UL, 2816321878UL, 1324077734UL, 1083060006UL, 3406855480
UL, 1619622379UL, 2160350UL, 3302238190UL, 3368021261UL, 3685228564UL, 3863
934685UL, 771728612UL, 854205233UL, 2304696695UL, 421449207UL, 1265752117UL
, 3852292419UL, 305345788UL, 1540622105UL, 1904883477UL, 833469256UL, 13440
6680UL, 3012455058UL, 4035477953UL, 2925192459UL, 1559200592UL, 3851612860U
L, 718484562UL, 1377960276UL, 1586892849UL, 1361298269UL, 3417917896UL, 128
1324499UL, 1012538763UL, 1350578667UL, 3946475598UL, 2982283954UL, 35487928
04UL, 284542749UL, 1194648577UL, 3087899716UL, 3966595444UL, 2088330116UL,
3641652062UL, 327128507UL, 593906557UL, 1092448919UL, 2459189516UL, 4053392
241UL, 3356198248UL, 2352376508UL, 470648997UL, 1017041256UL, 3234172340UL,
3928191489UL, 3266226858UL, 4219289150UL, 1229098319UL, 4275351308UL, 2720
777751UL, 3566728718UL, 638322822UL, 2369792461UL, 2869492261UL, 3120083828
UL, 1890399556UL, 3309991008UL, 3785452464UL, 4128660314UL, 3726791982UL, 1
67177896UL, 461294981UL, 3988638998UL, 2937794823UL, 3981029822UL, 11116814
02UL, 2015965721UL, 7261806UL, 2669786265UL, 1083582734UL, 3270228881UL, 38
92235938UL, 2695872715UL, 4246051290UL, 3214293333UL, 343604199UL, 32156048
88UL, 661024127UL, 2931754053UL, 3787840039UL, 2053363765UL, 363432336UL, 1
12334132UL, 2871797223UL, 138911320UL, 3981126938UL, 2027332192UL, 18047306
44UL, 590150270UL, 641538574UL, 6802174UL, 3551446076UL, 3908480472UL, 1004
531022UL, 2097228524UL, 1919074232UL, 154482247UL, 121437972UL, 1215661323U
L, 1178068273UL, 1097220699UL, 2823681422UL, 262636065UL, 2943371149UL, 176
8780720UL, 3866040605UL, 1855991583UL, 3988248086UL, 629223947UL, 338061233
0UL, 3552916762UL, 197596340UL, 573801686UL, 2049230598UL, 2910471867UL, 26
86314264UL, 1726228846UL, 3516983332UL, 726840185UL, 1241204222UL, 22375743
17UL, 70568042UL, 1932610099UL, 2221862221UL, 1510378092UL, 4050391637UL, 4
077539568UL, }, {3872117793UL, 803220151UL, 70843412UL, 1661103032UL, 19768
11457UL, 2186373604UL, 564259972UL, 1475436923UL, 2260980893UL, 4245534505U
L, 1075107552UL, 3692990573UL, 370098873UL, 4045905424UL, 2420395420UL, 233
2395402UL, 207483321UL, 622317750UL, 3004242500UL, 833623111UL, 3151161301U
L, 1629139881UL, 352228793UL, 2439953368UL, 3183333619UL, 2703537080UL, 321
8957129UL, 3164695888UL, 1741641842UL, 963394141UL, 4241612717UL, 103447678
4UL, 2035880432UL, 3977821313UL, 1543311495UL, 3010014356UL, 1638490901UL,
2364265378UL, 3420329129UL, 333361555UL, 1133565821UL, 1450937015UL, 616059
115UL, 3216393887UL, 3041978455UL, 3990855695UL, 1238628750UL, 512746184UL,
3256670217UL, 1616316512UL, 2791405051UL, 93474487UL, 2865892488UL, 190147
1398UL, 2930857966UL, 2178431077UL, 2325598341UL, 3189256113UL, 1302432091U
L, 808592927UL, 2945846737UL, 3487931071UL, 2018175258UL, 752981057UL, 1097
082589UL, 1307115286UL, 175147508UL, 3611190164UL, 850238914UL, 3318706185U
L, 199743319UL, 328621708UL, 3183670050UL, 3609998315UL, 4075306371UL, 3554
549067UL, 2119566187UL, 1498503842UL, 1261870696UL, 2216745780UL, 950288337
UL, 1117344941UL, 2150569143UL, 2899286760UL, 1594966374UL, 888858617UL, 35
840654UL, 2829539211UL, 2511395669UL, 3607190544UL, 3278412778UL, 224989590
7UL, 1320858068UL, 3576889788UL, 266766189UL, 1522426851UL, 1903494122UL, 1
928370573UL, 2628132591UL, 3322025904UL, 220280169UL, 433606853UL, 14289614
79UL, 986074592UL, 2128892987UL, 467697583UL, 1616913929UL, 325674890UL, 44
4442578UL, 649166208UL, 1689709565UL, 1493452467UL, 2222122038UL, 121114616
UL, 2134348225UL, 3512035688UL, 1283058921UL, 4230441398UL, 3701238559UL, 3
37534132UL, 1418548715UL, 1190006478UL, 500654385UL, 1766924757UL, 19446807
46UL, 940574010UL, 922744002UL, 186142284UL, 3131162902UL, 1693891092UL, 30
31823448UL, 2143051534UL, 1429025284UL, 1487843160UL, 3606456133UL, 2079235
652UL, 2447285474UL, 2669283767UL, 3232117829UL, 2490054343UL, 3225501736UL
, 2911340385UL, 382319031UL, 1516937595UL, 622543191UL, 1388990570UL, 17491
79860UL, 1924483707UL, 2593474505UL, 472539197UL, 122872799UL, 2586347240UL
, 880588515UL, 4046335279UL, 1712182607UL, 4270737941UL, 1336703451UL, 3390
078162UL, 382216945UL, 3733326081UL, 460422073UL, 3872117793UL, 803220151UL
, 70843412UL, 1661103032UL, 250339760UL, 2186373604UL, 564259972UL, 1475436
923UL, 2260980893UL, 657986735UL, 1075107552UL, 3692990573UL, 370098873UL,
4045905424UL, 3201950123UL, 2332395402UL, 207483321UL, 622317750UL, 3004242
500UL, 3732213278UL, 3151161301UL, 1629139881UL, 352228793UL, 2439953368UL,
3572618926UL, 2703537080UL, 3218957129UL, 3164695888UL, 1741641842UL, 6859
33373UL, 4241612717UL, 1034476784UL, 2035880432UL, 3977821313UL, 3855995181
UL, 3010014356UL, 1638490901UL, 2364265378UL, 3420329129UL, 2355603679UL, 1
133565821UL, 1450937015UL, 616059115UL, 3216393887UL, 1733804102UL, 3990855
695UL, 1238628750UL, 512746184UL, 3256670217UL, 2651059231UL, 2791405051UL,
93474487UL, 2865892488UL, 1901471398UL, 2113461797UL, 2178431077UL, 232559
8341UL, 3189256113UL, 1302432091UL, 2986990416UL, 2945846737UL, 3487931071U
L, 2018175258UL, 752981057UL, 2428033310UL, 1307115286UL, 175147508UL, 3611
190164UL, 850238914UL, 1033628405UL, 199743319UL, 328621708UL, 3183670050UL
, 3609998315UL, 4024297327UL, 3554549067UL, 2119566187UL, 1498503842UL, 126
1870696UL, 290361143UL, 950288337UL, 1117344941UL, 2150569143UL, 2899286760
UL, 168826051UL, 888858617UL, 35840654UL, 2829539211UL, 2511395669UL, 28908
82060UL, 3278412778UL, 2249895907UL, 1320858068UL, 3576889788UL, 1794920145
UL, 1522426851UL, 1903494122UL, 1928370573UL, 2628132591UL, 1251697758UL, 2
20280169UL, 433606853UL, 1428961479UL, 986074592UL, 2707115661UL, 467697583
UL, 1616913929UL, 325674890UL, 444442578UL, 122781510UL, 1689709565UL, 1493
452467UL, 2222122038UL, 121114616UL, 3425723636UL, 3512035688UL, 1283058921
UL, 4230441398UL, 3701238559UL, 1646155473UL, 1418548715UL, 1190006478UL, 5
00654385UL, 1766924757UL, 3920475367UL, 940574010UL, 922744002UL, 186142284
UL, 3131162902UL, 54639113UL, 3031823448UL, 2143051534UL, 1429025284UL, 148
7843160UL, 4152687885UL, 2079235652UL, 2447285474UL, 2669283767UL, 32321178
29UL, 1601035152UL, 3225501736UL, 2911340385UL, 382319031UL, 1516937595UL,
3508441679UL, 1388990570UL, 1749179860UL, 1924483707UL, 2593474505UL, 28354
03456UL, 122872799UL, 2586347240UL, 880588515UL, 4046335279UL, 2958058367UL
, 4270737941UL, 1336703451UL, 3390078162UL, 382216945UL, 450517882UL, 46042
2073UL, 3872117793UL, 803220151UL, 70843412UL, 2066343874UL, 250339760UL, 2
186373604UL, 564259972UL, 1475436923UL, 1683787449UL, 657986735UL, 10751075
52UL, 3692990573UL, 370098873UL, 2615082840UL, 3201950123UL, 2332395402UL,
207483321UL, 622317750UL, 2655424371UL, 3732213278UL, 3151161301UL, 1629139
881UL, 352228793UL, 3236724760UL, 3572618926UL, 2703537080UL, 3218957129UL,
3164695888UL, 9775065UL, 685933373UL, 4241612717UL, 1034476784UL, 20358804
32UL, 1621920075UL, 3855995181UL, 3010014356UL, 1638490901UL, 2364265378UL,
1509475888UL, 2355603679UL, 1133565821UL, 1450937015UL, 616059115UL, 36661
88236UL, 1733804102UL, 3990855695UL, 1238628750UL, 512746184UL, 3900473826U
L, 2651059231UL, 2791405051UL, 93474487UL, 2865892488UL, 222759186UL, 21134
61797UL, 2178431077UL, 2325598341UL, 3189256113UL, 2505499508UL, 2986990416
UL, 2945846737UL, 3487931071UL, 2018175258UL, 2766733928UL, 2428033310UL, 1
307115286UL, 175147508UL, 3611190164UL, 1909211603UL, 1033628405UL, 1997433
19UL, 328621708UL, 3183670050UL, 1680331218UL, 4024297327UL, 3554549067UL,
2119566187UL, 1498503842UL, 3516256046UL, 290361143UL, 950288337UL, 1117344
941UL, 2150569143UL, 3182619063UL, 168826051UL, 888858617UL, 35840654UL, 28
29539211UL, 645798943UL, 2890882060UL, 3278412778UL, 2249895907UL, 13208580
68UL, 1436708568UL, 1794920145UL, 1522426851UL, 1903494122UL, 1928370573UL,
3693049252UL, 1251697758UL, 220280169UL, 433606853UL, 1428961479UL, 372441
5861UL, 2707115661UL, 467697583UL, 1616913929UL, 325674890UL, 1448052253UL,
122781510UL, 1689709565UL, 1493452467UL, 2222122038UL, 2177448198UL, 34257
23636UL, 3512035688UL, 1283058921UL, 4230441398UL, 3050940272UL, 1646155473
UL, 1418548715UL, 1190006478UL, 500654385UL, 1106232UL, 3920475367UL, 94057
4010UL, 922744002UL, 186142284UL, 4144806511UL, 54639113UL, 3031823448UL, 2
143051534UL, 1429025284UL, 2067453848UL, 4152687885UL, 2079235652UL, 244728
5474UL, 2669283767UL, 428527087UL, 1601035152UL, 3225501736UL, 2911340385UL
, 382319031UL, 2565464472UL, 3508441679UL, 1388990570UL, 1749179860UL, 1924
483707UL, 1737735237UL, 2835403456UL, 122872799UL, 2586347240UL, 880588515U
L, 597822462UL, 2958058367UL, 4270737941UL, 1336703451UL, 3390078162UL, 253
2634475UL, 450517882UL, 460422073UL, 3872117793UL, 803220151UL, 801648827UL
, 2066343874UL, 250339760UL, 2186373604UL, 564259972UL, 3417948976UL, 16837
87449UL, 657986735UL, 1075107552UL, 3692990573UL, 2235306692UL, 2615082840U
L, 3201950123UL, 2332395402UL, 207483321UL, 699310933UL, 2655424371UL, 3732
213278UL, 3151161301UL, 1629139881UL, 1152704006UL, 3236724760UL, 357261892
6UL, 2703537080UL, 3218957129UL, 2726926336UL, 9775065UL, 685933373UL, 4241
612717UL, 1034476784UL, 2398119652UL, 1621920075UL, 3855995181UL, 301001435
6UL, 1638490901UL, 252854480UL, 1509475888UL, 2355603679UL, 1133565821UL, 1
450937015UL, 2655911639UL, 3666188236UL, 1733804102UL, 3990855695UL, 123862
8750UL, 1115900497UL, 3900473826UL, 2651059231UL, 2791405051UL, 93474487UL,
1862985957UL, 222759186UL, 2113461797UL, 2178431077UL, 2325598341UL, 41790
75132UL, 2505499508UL, 2986990416UL, 2945846737UL, 3487931071UL, 564667776U
L, 2766733928UL, 2428033310UL, 1307115286UL, 175147508UL, 1759077815UL, 190
9211603UL, 1033628405UL, 199743319UL, 328621708UL, 2552816198UL, 1680331218
UL, 4024297327UL, 3554549067UL, 2119566187UL, 2267805778UL, 3516256046UL, 2
90361143UL, 950288337UL, 1117344941UL, 2897506172UL, 3182619063UL, 16882605
1UL, 888858617UL, 35840654UL, 2035476068UL, 645798943UL, 2890882060UL, 3278
412778UL, 2249895907UL, 3278449102UL, 1436708568UL, 1794920145UL, 152242685
1UL, 1903494122UL, 1500763736UL, 3693049252UL, 1251697758UL, 220280169UL, 4
33606853UL, 3914497854UL, 3724415861UL, 2707115661UL, 467697583UL, 16169139
29UL, 918435305UL, 1448052253UL, 122781510UL, 1689709565UL, 1493452467UL, 6
09575172UL, 2177448198UL, 3425723636UL, 3512035688UL, 1283058921UL, 3661181
550UL, 3050940272UL, 1646155473UL, 1418548715UL, 1190006478UL, 1047301661UL
, 1106232UL, 3920475367UL, 940574010UL, 922744002UL, 2510633517UL, 41448065
11UL, 54639113UL, 3031823448UL, 2143051534UL, 3242814908UL, 2067453848UL, 4
152687885UL, 2079235652UL, 2447285474UL, 736638210UL, 428527087UL, 16010351
52UL, 3225501736UL, 2911340385UL, 1849570436UL, 2565464472UL, 3508441679UL,
1388990570UL, 1749179860UL, 84517579UL, 1737735237UL, 2835403456UL, 122872
799UL, 2586347240UL, 4002124614UL, 597822462UL, 2958058367UL, 4270737941UL,
1336703451UL, 3078170472UL, 1186434751UL, 700631413UL, 1497890797UL, 11953
47450UL, 2560167391UL, 1116697259UL, 1254138573UL, 747913260UL, 240954704UL
, 3107512667UL, 360584144UL, 3422778960UL, 3516528389UL, 3301260366UL, 1254
513537UL, 122269053UL, 1579582456UL, 873334104UL, 3918835024UL, 1731872444U
L, 1974410416UL, 1811172641UL, 4172523062UL, 4092675777UL, 4124987343UL, 19
36078756UL, 1757348689UL, 2694415512UL, 128641660UL, 1744777659UL, 31731167
29UL, 983733754UL, 1430789547UL, 701906842UL, 3367232568UL, 3266433501UL, 3
572590347UL, 1453272962UL, 2106553114UL, 993786201UL, 2149441250UL, 1295181
065UL, 2962229026UL, 3709052556UL, 3255608941UL, 3677730029UL, 483873127UL,
102227292UL, 2626265293UL, 2018984578UL, 2266388762UL, 1191709548UL, 21527
25916UL, 583672623UL, 2230473473UL, 1995194269UL, 1740347812UL, 2558095372U
L, 3070195183UL, 3023333227UL, 2497183195UL, 1908755188UL, 773027539UL, 364
6876518UL, 2272586839UL, 493318726UL, 2107067517UL, 2000805278UL, 253082963
6UL, 3183628745UL, 677565332UL, 1497629423UL, 82094920UL, 2214054433UL, 263
5367545UL, 470855467UL, 2184853389UL, 2942188934UL, 188335670UL, 3656661644
UL, 1883526235UL, 3990873975UL, 1490784356UL, 4047548172UL, 3149642641UL, 3
289988179UL, 2590918909UL, 2893039564UL, 2350687346UL, 4252624874UL, 153724
56UL, 1614496594UL, 2364847678UL, 2604511825UL, 422365460UL, 4195174772UL,
3266964836UL, 2008671995UL, 54038434UL, 781948549UL, 1276017666UL, 27563766
12UL, 2436825273UL, 1711863836UL, 3541493950UL, 3821378841UL, 1007557618UL,
345375815UL, 2081905201UL, 2227278118UL, 1185927141UL, 1082173792UL, 35673
61925UL, 1940465859UL, 541632942UL, 1830210248UL, 3757851982UL, 775883450UL
, 1666577465UL, 1004944607UL, 878440834UL, 2146344131UL, 4195798476UL, 3701
64841UL, 3649112729UL, 37066142UL, 2311278904UL, 1935745497UL, 2304799402UL
, 4107299626UL, 1348526232UL, 2473609635UL, 3284032699UL, 2374292786UL, 176
2329186UL, 857978496UL, 1039346432UL, 2621413355UL, 29961014UL, 3582263091U
L, 4268542513UL, 3890612190UL, 3096173646UL, 2026544230UL, 3856142618UL, 23
47115934UL, 319800326UL, 3255916105UL, 2430273059UL, 823505311UL, 874255188
UL, 1401925393UL, 4203707857UL, 4259159566UL, 2606881118UL, 1978288664UL, 1
447576038UL, 3860341401UL, 412510348UL, }, {4052471963UL, 683640040UL, 3043
876021UL, 3466644483UL, 4222418025UL, 3035140128UL, 1466027937UL, 18198088U
L, 3410320851UL, 3040963721UL, 488404231UL, 3157371815UL, 769336092UL, 3240
417718UL, 808582581UL, 2075839263UL, 835026995UL, 3123726486UL, 3284240985U
L, 1898453053UL, 3606056482UL, 512836002UL, 2715428547UL, 4182302879UL, 164
4882480UL, 3160187826UL, 390292489UL, 980889545UL, 2776206633UL, 2482799995
UL, 617042280UL, 3501667414UL, 689451808UL, 497018701UL, 238525753UL, 38901
63301UL, 896679896UL, 1544533015UL, 3412477225UL, 3116575138UL, 4250402651U
L, 3990990746UL, 819056741UL, 1459334146UL, 158377590UL, 3444755752UL, 8230
450UL, 1378706455UL, 684191332UL, 3217423797UL, 2842520097UL, 1631477948UL,
2591254230UL, 959644473UL, 1020694107UL, 1748401915UL, 3452514983UL, 38927
66171UL, 1227786994UL, 2086180800UL, 2394613217UL, 2091953150UL, 870094953U
L, 2306851481UL, 571550601UL, 488878212UL, 873197214UL, 2630100528UL, 20674
76907UL, 2162307009UL, 2026119728UL, 115875280UL, 2905867426UL, 248774881UL
, 3110900450UL, 2236032812UL, 1888510348UL, 708001855UL, 996960491UL, 35141
96956UL, 1407967546UL, 1826568876UL, 3659618284UL, 2614104317UL, 2230066308
UL, 1055135881UL, 2537437343UL, 1858044413UL, 2608594891UL, 2750681169UL, 3
241939420UL, 3966440877UL, 2375002886UL, 2417753441UL, 1405878685UL, 108113
3199UL, 1496940727UL, 382467042UL, 2745477587UL, 1209424459UL, 811187075UL,
1385604734UL, 2623887355UL, 3443875720UL, 394141555UL, 4142998949UL, 41954
14618UL, 1489846841UL, 2253433808UL, 1171450286UL, 84131191UL, 4387588UL, 2
641405140UL, 3525405389UL, 3273000909UL, 423660319UL, 2366546732UL, 3698878
607UL, 2161119729UL, 4263629085UL, 3029102089UL, 2692507376UL, 3266869596UL
, 1658012061UL, 1960169440UL, 1002311379UL, 3724446882UL, 2004188516UL, 999
513506UL, 2200093802UL, 4141037460UL, 351865836UL, 412875013UL, 1535823315U
L, 3880657632UL, 3109944987UL, 3207577548UL, 3462087941UL, 584875517UL, 263
5241084UL, 3834145971UL, 1693380373UL, 3524443732UL, 934775214UL, 196058884
7UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000UL, 226430296UL, 665
553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 3925248326UL, 285534637
6UL, 1205558328UL, 808835317UL, 3295908896UL, 4170076136UL, 2438272365UL, 4
052471963UL, 683640040UL, 3043876021UL, 3466644483UL, 1385549869UL, 3035140
128UL, 1466027937UL, 18198088UL, 3410320851UL, 2171386836UL, 488404231UL, 3
157371815UL, 769336092UL, 3240417718UL, 2921774554UL, 2075839263UL, 8350269
95UL, 3123726486UL, 3284240985UL, 72352110UL, 3606056482UL, 512836002UL, 27
15428547UL, 4182302879UL, 3869483469UL, 3160187826UL, 390292489UL, 98088954
5UL, 2776206633UL, 1385691983UL, 617042280UL, 3501667414UL, 689451808UL, 49
7018701UL, 2600411809UL, 3890163301UL, 896679896UL, 1544533015UL, 341247722
5UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL, 1459334146UL, 19
9003993UL, 3444755752UL, 8230450UL, 1378706455UL, 684191332UL, 1750733272UL
, 2842520097UL, 1631477948UL, 2591254230UL, 959644473UL, 2113375576UL, 1748
401915UL, 3452514983UL, 3892766171UL, 1227786994UL, 275473920UL, 2394613217
UL, 2091953150UL, 870094953UL, 2306851481UL, 897057645UL, 488878212UL, 8731
97214UL, 2630100528UL, 2067476907UL, 944114068UL, 2026119728UL, 115875280UL
, 2905867426UL, 248774881UL, 989201307UL, 2236032812UL, 1888510348UL, 70800
1855UL, 996960491UL, 2121706374UL, 1407967546UL, 1826568876UL, 3659618284UL
, 2614104317UL, 2931815032UL, 1055135881UL, 2537437343UL, 1858044413UL, 260
8594891UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002886UL, 24177534
41UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL, 2745477587UL,
81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 3443875720UL, 21006298
79UL, 4142998949UL, 4195414618UL, 1489846841UL, 2253433808UL, 337182869UL,
84131191UL, 4387588UL, 2641405140UL, 3525405389UL, 661876463UL, 423660319UL
, 2366546732UL, 3698878607UL, 2161119729UL, 309510684UL, 3029102089UL, 2692
507376UL, 3266869596UL, 1658012061UL, 11119541UL, 1002311379UL, 3724446882U
L, 2004188516UL, 999513506UL, 3486722046UL, 4141037460UL, 351865836UL, 4128
75013UL, 1535823315UL, 2818130700UL, 3109944987UL, 3207577548UL, 3462087941
UL, 584875517UL, 322875622UL, 3834145971UL, 1693380373UL, 3524443732UL, 934
775214UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000U
L, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 553
869152UL, 2855346376UL, 1205558328UL, 808835317UL, 3295908896UL, 470585896U
L, 2438272365UL, 4052471963UL, 683640040UL, 3043876021UL, 1588419572UL, 138
5549869UL, 3035140128UL, 1466027937UL, 18198088UL, 363815288UL, 2171386836U
L, 488404231UL, 3157371815UL, 769336092UL, 2464768302UL, 2921774554UL, 2075
839263UL, 835026995UL, 3123726486UL, 4229246330UL, 72352110UL, 3606056482UL
, 512836002UL, 2715428547UL, 319830805UL, 3869483469UL, 3160187826UL, 39029
2489UL, 980889545UL, 2966401462UL, 1385691983UL, 617042280UL, 3501667414UL,
689451808UL, 4047377762UL, 2600411809UL, 3890163301UL, 896679896UL, 154453
3015UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL,
965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1378706455UL, 51902971UL
, 1750733272UL, 2842520097UL, 1631477948UL, 2591254230UL, 426039404UL, 2113
375576UL, 1748401915UL, 3452514983UL, 3892766171UL, 2833368447UL, 275473920
UL, 2394613217UL, 2091953150UL, 870094953UL, 3524323828UL, 897057645UL, 488
878212UL, 873197214UL, 2630100528UL, 3939852929UL, 944114068UL, 2026119728U
L, 115875280UL, 2905867426UL, 3192643919UL, 989201307UL, 2236032812UL, 1888
510348UL, 708001855UL, 2166012172UL, 2121706374UL, 1407967546UL, 1826568876
UL, 3659618284UL, 135277096UL, 2931815032UL, 1055135881UL, 2537437343UL, 18
58044413UL, 2588429924UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002
886UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL,
1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 4070531
513UL, 2100629879UL, 4142998949UL, 4195414618UL, 1489846841UL, 2688068550UL
, 337182869UL, 84131191UL, 4387588UL, 2641405140UL, 1837403234UL, 661876463
UL, 423660319UL, 2366546732UL, 3698878607UL, 2916121190UL, 309510684UL, 302
9102089UL, 2692507376UL, 3266869596UL, 303422295UL, 11119541UL, 1002311379U
L, 3724446882UL, 2004188516UL, 2652711421UL, 3486722046UL, 4141037460UL, 35
1865836UL, 412875013UL, 113149471UL, 2818130700UL, 3109944987UL, 3207577548
UL, 3462087941UL, 1443140792UL, 322875622UL, 3834145971UL, 1693380373UL, 35
24443732UL, 901891935UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016
UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 11
14492412UL, 553869152UL, 2855346376UL, 1205558328UL, 808835317UL, 326662629
4UL, 470585896UL, 2438272365UL, 4052471963UL, 683640040UL, 3581539398UL, 15
88419572UL, 1385549869UL, 3035140128UL, 1466027937UL, 4075470388UL, 3638152
88UL, 2171386836UL, 488404231UL, 3157371815UL, 2759472233UL, 2464768302UL,
2921774554UL, 2075839263UL, 835026995UL, 1030654310UL, 4229246330UL, 723521
10UL, 3606056482UL, 512836002UL, 961858496UL, 319830805UL, 3869483469UL, 31
60187826UL, 390292489UL, 2366221117UL, 2966401462UL, 1385691983UL, 61704228
0UL, 3501667414UL, 295865937UL, 4047377762UL, 2600411809UL, 3890163301UL, 8
96679896UL, 21714884UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746U
L, 1012967081UL, 965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1255302
023UL, 51902971UL, 1750733272UL, 2842520097UL, 1631477948UL, 2321320272UL,
426039404UL, 2113375576UL, 1748401915UL, 3452514983UL, 2847013518UL, 283336
8447UL, 275473920UL, 2394613217UL, 2091953150UL, 1250695522UL, 3524323828UL
, 897057645UL, 488878212UL, 873197214UL, 1452317325UL, 3939852929UL, 944114
068UL, 2026119728UL, 115875280UL, 4061820350UL, 3192643919UL, 989201307UL,
2236032812UL, 1888510348UL, 3986446165UL, 2166012172UL, 2121706374UL, 14079
67546UL, 1826568876UL, 2910745432UL, 135277096UL, 2931815032UL, 1055135881U
L, 2537437343UL, 2976455307UL, 2588429924UL, 1423973935UL, 3241939420UL, 39
66440877UL, 2418897705UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940
727UL, 1321648771UL, 1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 1
7644628UL, 4070531513UL, 2100629879UL, 4142998949UL, 4195414618UL, 26973105
27UL, 2688068550UL, 337182869UL, 84131191UL, 4387588UL, 1724191700UL, 18374
03234UL, 661876463UL, 423660319UL, 2366546732UL, 693430992UL, 2916121190UL,
309510684UL, 3029102089UL, 2692507376UL, 3917396098UL, 303422295UL, 111195
41UL, 1002311379UL, 3724446882UL, 841468294UL, 2652711421UL, 3486722046UL,
4141037460UL, 351865836UL, 1733384185UL, 113149471UL, 2818130700UL, 3109944
987UL, 3207577548UL, 2326233100UL, 1443140792UL, 322875622UL, 3834145971UL,
1693380373UL, 1580706359UL, 901891935UL, 3879414752UL, 2226778032UL, 10446
09478UL, 3805470822UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348U
L, 3406548636UL, 1114492412UL, 553869152UL, 2855346376UL, 1205558328UL, 428
7831475UL, 1329654114UL, 2347235746UL, 2477803138UL, 2962371859UL, 36100242
83UL, 4197266903UL, 1162294689UL, 1746713323UL, 2815058477UL, 2152552186UL,
4214791071UL, 2382522482UL, 3713914466UL, 3974765132UL, 348354997UL, 16702
76150UL, 2173074887UL, 381736894UL, 3866219357UL, 1919366695UL, 3635118824U
L, 2298653261UL, 3534332682UL, 1627699897UL, 4168636618UL, 3787938690UL, 21
44231271UL, 2067679462UL, 217001062UL, 2308928337UL, 1620415125UL, 35265591
72UL, 749451561UL, 2456947371UL, 3543607786UL, 1893824735UL, 962598819UL, 2
332807164UL, 1691114891UL, 2543992233UL, 2914780639UL, 1610287145UL, 170059
9697UL, 3185174208UL, 552323208UL, 2367242224UL, 3797136972UL, 3415066418UL
, 2468049249UL, 1677937401UL, 40445671UL, 2886682530UL, 2585715434UL, 19493
2329UL, 2994003812UL, 3099556382UL, 680852222UL, 135838738UL, 1371063256UL,
995454898UL, 3754526418UL, 803635682UL, 634588682UL, 3869250783UL, 2442285
521UL, 1455637058UL, 570621479UL, 2512681851UL, 1220136924UL, 750260121UL,
2909903038UL, 1582019728UL, 955115170UL, 1608265445UL, 2157390890UL, 230367
8604UL, 1568394164UL, 831914289UL, 1971271392UL, 1294799854UL, 1489945167UL
, 442427880UL, 1305083700UL, 1211218668UL, 2380073713UL, 2798736785UL, 2193
524273UL, 3227386915UL, 1636588977UL, 3612937642UL, 435113647UL, 1591761830
UL, 536210039UL, 2475747073UL, 4223795480UL, 1786737271UL, 1444661534UL, 32
49410301UL, 3333695212UL, 4169107188UL, 3280638635UL, 702659930UL, 14441279
70UL, 225340755UL, 2255629368UL, 746584456UL, 3965677674UL, 2671132955UL, 2
080717656UL, 2145343886UL, 3712441197UL, 368422910UL, 1297685674UL, 4076123
901UL, 26214470UL, 2948764826UL, 40503299UL, 1198194334UL, 2100063637UL, 19
66331612UL, 2189582064UL, 2064696934UL, 1797550642UL, 3469793941UL, 2868963
812UL, 851437659UL, 240918534UL, 365060070UL, 3530600064UL, 39695324UL, 175
3898837UL, 1286976449UL, 3131971360UL, 2406485219UL, 3365373704UL, 32241134
03UL, 1651742834UL, 587601940UL, 1574206085UL, 3739575036UL, 1413669616UL,
38172232UL, 293127854UL, 4126190109UL, 1891744061UL, 787878666UL, 456643669
UL, 4228710325UL, 2025132037UL, 1492133135UL, 3122840937UL, 969442079UL, 32
72420439UL, 3836126369UL, 1877655562UL, 2766212758UL, 3867984746UL, 3348077
578UL, 1841216706UL, }, {1676507466UL, 1017841240UL, 2992644565UL, 47693615
8UL, 2468072723UL, 3113105154UL, 1154120402UL, 460889625UL, 1942263502UL, 1
761593999UL, 3020908939UL, 3078194866UL, 310971889UL, 1644896012UL, 3756044
556UL, 3549937583UL, 3710822994UL, 3554313733UL, 2174654326UL, 4251063242UL
, 2340485150UL, 950951909UL, 4288936895UL, 3744348848UL, 706644559UL, 10859
27825UL, 1595992020UL, 3288724966UL, 1367247946UL, 2950094970UL, 3925419886
UL, 2628739022UL, 2528254629UL, 3582224789UL, 3907345559UL, 3373329273UL, 4
255542251UL, 1185418446UL, 4018656113UL, 2854344020UL, 1381160022UL, 364243
8773UL, 4284399225UL, 935780030UL, 4142412144UL, 1263328494UL, 1154237693UL
, 2684443667UL, 3067549398UL, 4253090033UL, 1251034970UL, 1874233020UL, 322
2830495UL, 3866931656UL, 286048055UL, 3146635362UL, 1436483376UL, 282187649
5UL, 3927829532UL, 2648886905UL, 2142862852UL, 1368937545UL, 2647327844UL,
1072219385UL, 2621337706UL, 3543274652UL, 911792564UL, 1204178178UL, 412721
4323UL, 2821691380UL, 3101998294UL, 730811902UL, 1989156224UL, 2872353003UL
, 278290276UL, 1390223786UL, 2657819643UL, 552729795UL, 1736270535UL, 27592
07116UL, 1897013739UL, 3657020278UL, 1387364861UL, 1966588302UL, 1049203087
UL, 486446521UL, 3675999281UL, 714737345UL, 686837530UL, 85509025UL, 360908
9773UL, 2117061768UL, 3935682560UL, 3859508784UL, 4105287041UL, 1808988481U
L, 83680601UL, 1464326680UL, 1657693523UL, 3318062731UL, 1391154023UL, 2344
60119UL, 3551348221UL, 2245244809UL, 3635923821UL, 2814385745UL, 3497626257
UL, 916790795UL, 245338628UL, 2514528380UL, 3711787525UL, 2239286063UL, 105
4058916UL, 3963706010UL, 3176203796UL, 2230543409UL, 2173597546UL, 37867338
92UL, 1396036965UL, 1038764273UL, 2032556038UL, 3216540537UL, 3298170974UL,
1008892557UL, 141155464UL, 1863766055UL, 3931110690UL, 191299053UL, 201913
9711UL, 2409528317UL, 739418419UL, 1377144055UL, 2876702705UL, 3911939673UL
, 1197696462UL, 2814009721UL, 600813233UL, 1535885024UL, 1486280357UL, 3084
650548UL, 2324695947UL, 2293284974UL, 2036339249UL, 3465600153UL, 162444610
8UL, 327866771UL, 3356772175UL, 1826625240UL, 1947102360UL, 3661848193UL, 1
421374867UL, 3228945021UL, 1358646008UL, 1067180174UL, 2190741258UL, 643362
354UL, 109899594UL, 2064362635UL, 3249674888UL, 2165543887UL, 4180291913UL,
1676507466UL, 1017841240UL, 2992644565UL, 476936158UL, 3608467942UL, 31131
05154UL, 1154120402UL, 460889625UL, 1942263502UL, 1862994005UL, 3020908939U
L, 3078194866UL, 310971889UL, 1644896012UL, 693774191UL, 3549937583UL, 3710
822994UL, 3554313733UL, 2174654326UL, 37658897UL, 2340485150UL, 950951909UL
, 4288936895UL, 3744348848UL, 2258231402UL, 1085927825UL, 1595992020UL, 328
8724966UL, 1367247946UL, 3850509554UL, 3925419886UL, 2628739022UL, 25282546
29UL, 3582224789UL, 3124287811UL, 3373329273UL, 4255542251UL, 1185418446UL,
4018656113UL, 1989726178UL, 1381160022UL, 3642438773UL, 4284399225UL, 9357
80030UL, 3622052196UL, 1263328494UL, 1154237693UL, 2684443667UL, 3067549398
UL, 2786224913UL, 1251034970UL, 1874233020UL, 3222830495UL, 3866931656UL, 1
529490307UL, 3146635362UL, 1436483376UL, 2821876495UL, 3927829532UL, 979247
444UL, 2142862852UL, 1368937545UL, 2647327844UL, 1072219385UL, 294065371UL,
3543274652UL, 911792564UL, 1204178178UL, 4127214323UL, 103582737UL, 310199
8294UL, 730811902UL, 1989156224UL, 2872353003UL, 1885087777UL, 1390223786UL
, 2657819643UL, 552729795UL, 1736270535UL, 3325206451UL, 1897013739UL, 3657
020278UL, 1387364861UL, 1966588302UL, 2117065739UL, 486446521UL, 3675999281
UL, 714737345UL, 686837530UL, 3946214694UL, 3609089773UL, 2117061768UL, 393
5682560UL, 3859508784UL, 2916136885UL, 1808988481UL, 83680601UL, 1464326680
UL, 1657693523UL, 3438751781UL, 1391154023UL, 234460119UL, 3551348221UL, 22
45244809UL, 3948410079UL, 2814385745UL, 3497626257UL, 916790795UL, 24533862
8UL, 1767303496UL, 3711787525UL, 2239286063UL, 1054058916UL, 3963706010UL,
4140631909UL, 2230543409UL, 2173597546UL, 3786733892UL, 1396036965UL, 11160
33475UL, 2032556038UL, 3216540537UL, 3298170974UL, 1008892557UL, 667272562U
L, 1863766055UL, 3931110690UL, 191299053UL, 2019139711UL, 272901326UL, 7394
18419UL, 1377144055UL, 2876702705UL, 3911939673UL, 3839312742UL, 2814009721
UL, 600813233UL, 1535885024UL, 1486280357UL, 4256065219UL, 2324695947UL, 22
93284974UL, 2036339249UL, 3465600153UL, 1215859603UL, 327866771UL, 33567721
75UL, 1826625240UL, 1947102360UL, 4240407984UL, 1421374867UL, 3228945021UL,
1358646008UL, 1067180174UL, 4100357988UL, 643362354UL, 109899594UL, 206436
2635UL, 3249674888UL, 2898852084UL, 4180291913UL, 1676507466UL, 1017841240U
L, 2992644565UL, 1569683812UL, 3608467942UL, 3113105154UL, 1154120402UL, 46
0889625UL, 966040649UL, 1862994005UL, 3020908939UL, 3078194866UL, 310971889
UL, 786634113UL, 693774191UL, 3549937583UL, 3710822994UL, 3554313733UL, 157
8429713UL, 37658897UL, 2340485150UL, 950951909UL, 4288936895UL, 2528123823U
L, 2258231402UL, 1085927825UL, 1595992020UL, 3288724966UL, 3544041088UL, 38
50509554UL, 3925419886UL, 2628739022UL, 2528254629UL, 2562145937UL, 3124287
811UL, 3373329273UL, 4255542251UL, 1185418446UL, 3693565710UL, 1989726178UL
, 1381160022UL, 3642438773UL, 4284399225UL, 3271478204UL, 3622052196UL, 126
3328494UL, 1154237693UL, 2684443667UL, 3615401444UL, 2786224913UL, 12510349
70UL, 1874233020UL, 3222830495UL, 2572413057UL, 1529490307UL, 3146635362UL,
1436483376UL, 2821876495UL, 3993894153UL, 979247444UL, 2142862852UL, 13689
37545UL, 2647327844UL, 1353904396UL, 294065371UL, 3543274652UL, 911792564UL
, 1204178178UL, 3165709748UL, 103582737UL, 3101998294UL, 730811902UL, 19891
56224UL, 893293786UL, 1885087777UL, 1390223786UL, 2657819643UL, 552729795UL
, 3388458110UL, 3325206451UL, 1897013739UL, 3657020278UL, 1387364861UL, 302
5318046UL, 2117065739UL, 486446521UL, 3675999281UL, 714737345UL, 2085926890
UL, 3946214694UL, 3609089773UL, 2117061768UL, 3935682560UL, 868009118UL, 29
16136885UL, 1808988481UL, 83680601UL, 1464326680UL, 797410789UL, 3438751781
UL, 1391154023UL, 234460119UL, 3551348221UL, 4068940987UL, 3948410079UL, 28
14385745UL, 3497626257UL, 916790795UL, 3722456098UL, 1767303496UL, 37117875
25UL, 2239286063UL, 1054058916UL, 2030352819UL, 4140631909UL, 2230543409UL,
2173597546UL, 3786733892UL, 3211336683UL, 1116033475UL, 2032556038UL, 3216
540537UL, 3298170974UL, 2589589144UL, 667272562UL, 1863766055UL, 3931110690
UL, 191299053UL, 1139480458UL, 272901326UL, 739418419UL, 1377144055UL, 2876
702705UL, 1954361769UL, 3839312742UL, 2814009721UL, 600813233UL, 1535885024
UL, 3587775605UL, 4256065219UL, 2324695947UL, 2293284974UL, 2036339249UL, 1
534849280UL, 1215859603UL, 327866771UL, 3356772175UL, 1826625240UL, 7203726
69UL, 4240407984UL, 1421374867UL, 3228945021UL, 1358646008UL, 3409069246UL,
4100357988UL, 643362354UL, 109899594UL, 2064362635UL, 4243434294UL, 289885
2084UL, 4180291913UL, 1676507466UL, 1017841240UL, 3243922356UL, 1569683812U
L, 3608467942UL, 3113105154UL, 1154120402UL, 1479311403UL, 966040649UL, 186
2994005UL, 3020908939UL, 3078194866UL, 1556392996UL, 786634113UL, 693774191
UL, 3549937583UL, 3710822994UL, 920664071UL, 1578429713UL, 37658897UL, 2340
485150UL, 950951909UL, 740197415UL, 2528123823UL, 2258231402UL, 1085927825U
L, 1595992020UL, 2580760267UL, 3544041088UL, 3850509554UL, 3925419886UL, 26
28739022UL, 3867556156UL, 2562145937UL, 3124287811UL, 3373329273UL, 4255542
251UL, 3185271749UL, 3693565710UL, 1989726178UL, 1381160022UL, 3642438773UL
, 3042165367UL, 3271478204UL, 3622052196UL, 1263328494UL, 1154237693UL, 101
6814036UL, 3615401444UL, 2786224913UL, 1251034970UL, 1874233020UL, 29560869
71UL, 2572413057UL, 1529490307UL, 3146635362UL, 1436483376UL, 1513970396UL,
3993894153UL, 979247444UL, 2142862852UL, 1368937545UL, 3275665128UL, 13539
04396UL, 294065371UL, 3543274652UL, 911792564UL, 2209636872UL, 3165709748UL
, 103582737UL, 3101998294UL, 730811902UL, 965151434UL, 893293786UL, 1885087
777UL, 1390223786UL, 2657819643UL, 3278634059UL, 3388458110UL, 3325206451UL
, 1897013739UL, 3657020278UL, 4293473749UL, 3025318046UL, 2117065739UL, 486
446521UL, 3675999281UL, 620561205UL, 2085926890UL, 3946214694UL, 3609089773
UL, 2117061768UL, 163384588UL, 868009118UL, 2916136885UL, 1808988481UL, 836
80601UL, 10243015UL, 797410789UL, 3438751781UL, 1391154023UL, 234460119UL,
1278218413UL, 4068940987UL, 3948410079UL, 2814385745UL, 3497626257UL, 12332
72798UL, 3722456098UL, 1767303496UL, 3711787525UL, 2239286063UL, 3968895688
UL, 2030352819UL, 4140631909UL, 2230543409UL, 2173597546UL, 2866251044UL, 3
211336683UL, 1116033475UL, 2032556038UL, 3216540537UL, 4233849723UL, 258958
9144UL, 667272562UL, 1863766055UL, 3931110690UL, 2468422423UL, 1139480458UL
, 272901326UL, 739418419UL, 1377144055UL, 4240143411UL, 1954361769UL, 38393
12742UL, 2814009721UL, 600813233UL, 3976840004UL, 3587775605UL, 4256065219U
L, 2324695947UL, 2293284974UL, 437604123UL, 1534849280UL, 1215859603UL, 327
866771UL, 3356772175UL, 2757237699UL, 720372669UL, 4240407984UL, 1421374867
UL, 3228945021UL, 3284801305UL, 3409069246UL, 4100357988UL, 643362354UL, 10
9899594UL, 1301585321UL, 2528806870UL, 1838904064UL, 448772403UL, 109784974
0UL, 1899994097UL, 618309123UL, 1911948510UL, 2309256224UL, 1861398151UL, 9
05306403UL, 1067595802UL, 36868624UL, 3780886191UL, 835126206UL, 3190251977
UL, 2672497726UL, 2085944002UL, 2912993968UL, 2493776706UL, 667136329UL, 14
74890786UL, 2383346554UL, 943528949UL, 3376706013UL, 2495573574UL, 14495634
5UL, 793159960UL, 1591274917UL, 477107637UL, 1383815442UL, 67384899UL, 2355
242218UL, 1687409818UL, 3801093871UL, 2108217811UL, 3455908733UL, 417216079
7UL, 3935534685UL, 631067839UL, 1187677548UL, 2280856137UL, 3020767646UL, 2
063176246UL, 3736904984UL, 2952933848UL, 2975164686UL, 4144473303UL, 346709
77UL, 1250976509UL, 3484166554UL, 1532744745UL, 225700994UL, 1878713627UL,
2122358980UL, 1456610194UL, 2917522161UL, 2818947075UL, 102678939UL, 537438
58UL, 2095250656UL, 4023979225UL, 3094092874UL, 4128760696UL, 3411610028UL,
3020200609UL, 2225866341UL, 586320946UL, 63813522UL, 1238216159UL, 2825692
263UL, 2169937231UL, 3298517640UL, 1542128261UL, 2205544184UL, 1258655704UL
, 2629012083UL, 4113650203UL, 3198617867UL, 2742310794UL, 3372657381UL, 311
5904410UL, 1948638822UL, 1123521744UL, 1080429281UL, 4086706732UL, 41426932
11UL, 817377147UL, 2570194641UL, 26001503UL, 2861456160UL, 4185725555UL, 25
73003804UL, 1618628779UL, 2588489212UL, 3996192609UL, 1555844274UL, 1003123
505UL, 1326350123UL, 1130583849UL, 3017128756UL, 74119042UL, 4041266437UL,
1938014170UL, 3528465794UL, 4203969698UL, 1913054398UL, 3617979809UL, 22188
10167UL, 2453899816UL, 1997423206UL, 477446533UL, 303090065UL, 757937082UL,
1523238256UL, 3140505311UL, 1422588701UL, 3642014639UL, 1740624195UL, 1276
017154UL, 3072526193UL, 3675105122UL, 1335122682UL, 4080595263UL, 230851942
0UL, 3299182769UL, 1461978532UL, 3098694217UL, 2982399822UL, 3088698511UL,
586759229UL, 3548750902UL, 1449857891UL, 2866451663UL, 2525162286UL, 572946
02UL, 4107991297UL, 1214672265UL, 2940391280UL, 4285346034UL, 3338216759UL,
737207923UL, 4264163846UL, 59219141UL, 2300024654UL, 1876616814UL, 1976543
605UL, 783571061UL, 1724699622UL, 1967524469UL, 1650309916UL, 3322257631UL,
3975521122UL, 273342162UL, 1156754241UL, 185315896UL, 3368133921UL, 663146
55UL, 4153777915UL, 3519901897UL, }, {3672467167UL, 68684525UL, 1738833632U
L, 3081329135UL, 2583806115UL, 2291130512UL, 503032614UL, 3658059597UL, 571
493931UL, 685537959UL, 3498787788UL, 422428426UL, 3879256913UL, 1173158320U
L, 4000800121UL, 298972869UL, 1718342816UL, 2541691685UL, 2490502642UL, 232
1452806UL, 4223212804UL, 1812334632UL, 3717655725UL, 4238191852UL, 30013071
65UL, 2621896355UL, 2572404999UL, 3590094954UL, 760765206UL, 2293618001UL,
1392353032UL, 1733137169UL, 2674005018UL, 4067961151UL, 1505710487UL, 45107
8217UL, 2591688848UL, 12635611UL, 507045428UL, 694822241UL, 1789383090UL, 1
140183890UL, 1720695967UL, 1994318191UL, 3340349873UL, 2793804971UL, 105443
3135UL, 2345087879UL, 3179939285UL, 1651968615UL, 1793223686UL, 1055357758U
L, 914271617UL, 483007580UL, 2127727816UL, 2754998083UL, 3179053982UL, 5984
42002UL, 1950227301UL, 213053613UL, 3566888111UL, 2832258993UL, 4260365359U
L, 443662829UL, 1706542890UL, 3852730296UL, 3643260763UL, 2163607277UL, 181
2905006UL, 171529637UL, 215187467UL, 2369406909UL, 1929000706UL, 2572441025
UL, 2133955541UL, 810692262UL, 1337974799UL, 4030350704UL, 2159178715UL, 37
69451556UL, 1026825278UL, 593628480UL, 1817383139UL, 878832429UL, 225387635
0UL, 203612980UL, 2102950440UL, 3407143936UL, 1912362251UL, 1595387637UL, 2
827580539UL, 305467658UL, 3292706746UL, 44135525UL, 4001933553UL, 369734308
9UL, 760470915UL, 587414402UL, 1419378814UL, 2852774010UL, 3891626781UL, 27
57016765UL, 1090707384UL, 3997074427UL, 1047182100UL, 2855539022UL, 3622915
9UL, 1591415533UL, 3471572739UL, 1237952140UL, 2614469314UL, 213338525UL, 8
86212578UL, 2620301943UL, 713590207UL, 2430496777UL, 1198164420UL, 26448416
98UL, 3654164701UL, 36283572UL, 1461695896UL, 1770331341UL, 1641501876UL, 3
470919184UL, 3181021559UL, 3053795110UL, 3533531372UL, 3134337355UL, 668308
383UL, 388340999UL, 3221275220UL, 1589659138UL, 294382235UL, 1447443579UL,
690177534UL, 1799726917UL, 2838977761UL, 4172949119UL, 2360858031UL, 159385
920UL, 2248389027UL, 1790015671UL, 3925738275UL, 1049918544UL, 4107349511UL
, 1619955951UL, 4188275966UL, 1672572975UL, 2672697497UL, 1863413666UL, 747
724021UL, 4037561738UL, 1605940213UL, 445253292UL, 3362434828UL, 610898209U
L, 1473244091UL, 735444769UL, 1540599852UL, 2449351720UL, 1032410949UL, 367
2467167UL, 68684525UL, 1738833632UL, 3081329135UL, 519684794UL, 2291130512U
L, 503032614UL, 3658059597UL, 571493931UL, 2400186105UL, 3498787788UL, 4224
28426UL, 3879256913UL, 1173158320UL, 4120704752UL, 298972869UL, 1718342816U
L, 2541691685UL, 2490502642UL, 1686027891UL, 4223212804UL, 1812334632UL, 37
17655725UL, 4238191852UL, 642431972UL, 2621896355UL, 2572404999UL, 35900949
54UL, 760765206UL, 2949609717UL, 1392353032UL, 1733137169UL, 2674005018UL,
4067961151UL, 1526077846UL, 451078217UL, 2591688848UL, 12635611UL, 50704542
8UL, 2417951415UL, 1789383090UL, 1140183890UL, 1720695967UL, 1994318191UL,
3465605863UL, 2793804971UL, 1054433135UL, 2345087879UL, 3179939285UL, 30792
97626UL, 1793223686UL, 1055357758UL, 914271617UL, 483007580UL, 306802527UL,
2754998083UL, 3179053982UL, 598442002UL, 1950227301UL, 2473418737UL, 35668
88111UL, 2832258993UL, 4260365359UL, 443662829UL, 2097776414UL, 3852730296U
L, 3643260763UL, 2163607277UL, 1812905006UL, 3957721904UL, 215187467UL, 236
9406909UL, 1929000706UL, 2572441025UL, 3779486126UL, 810692262UL, 133797479
9UL, 4030350704UL, 2159178715UL, 1127012865UL, 1026825278UL, 593628480UL, 1
817383139UL, 878832429UL, 361018423UL, 203612980UL, 2102950440UL, 340714393
6UL, 1912362251UL, 1475218277UL, 2827580539UL, 305467658UL, 3292706746UL, 4
4135525UL, 1900092336UL, 3697343089UL, 760470915UL, 587414402UL, 1419378814
UL, 343303227UL, 3891626781UL, 2757016765UL, 1090707384UL, 3997074427UL, 74
5490961UL, 2855539022UL, 36229159UL, 1591415533UL, 3471572739UL, 3920625546
UL, 2614469314UL, 213338525UL, 886212578UL, 2620301943UL, 827771411UL, 2430
496777UL, 1198164420UL, 2644841698UL, 3654164701UL, 2747674190UL, 146169589
6UL, 1770331341UL, 1641501876UL, 3470919184UL, 919857376UL, 3053795110UL, 3
533531372UL, 3134337355UL, 668308383UL, 201138876UL, 3221275220UL, 15896591
38UL, 294382235UL, 1447443579UL, 4211579707UL, 1799726917UL, 2838977761UL,
4172949119UL, 2360858031UL, 416103844UL, 2248389027UL, 1790015671UL, 392573
8275UL, 1049918544UL, 3481887924UL, 1619955951UL, 4188275966UL, 1672572975U
L, 2672697497UL, 564854400UL, 747724021UL, 4037561738UL, 1605940213UL, 4452
53292UL, 604900912UL, 610898209UL, 1473244091UL, 735444769UL, 1540599852UL,
3036173307UL, 1032410949UL, 3672467167UL, 68684525UL, 1738833632UL, 973022
696UL, 519684794UL, 2291130512UL, 503032614UL, 3658059597UL, 1500301452UL,
2400186105UL, 3498787788UL, 422428426UL, 3879256913UL, 3923611748UL, 412070
4752UL, 298972869UL, 1718342816UL, 2541691685UL, 2323881484UL, 1686027891UL
, 4223212804UL, 1812334632UL, 3717655725UL, 2109094458UL, 642431972UL, 2621
896355UL, 2572404999UL, 3590094954UL, 1837882537UL, 2949609717UL, 139235303
2UL, 1733137169UL, 2674005018UL, 3252348987UL, 1526077846UL, 451078217UL, 2
591688848UL, 12635611UL, 3971261781UL, 2417951415UL, 1789383090UL, 11401838
90UL, 1720695967UL, 2906966040UL, 3465605863UL, 2793804971UL, 1054433135UL,
2345087879UL, 915518921UL, 3079297626UL, 1793223686UL, 1055357758UL, 91427
1617UL, 791633499UL, 306802527UL, 2754998083UL, 3179053982UL, 598442002UL,
324402573UL, 2473418737UL, 3566888111UL, 2832258993UL, 4260365359UL, 216804
6398UL, 2097776414UL, 3852730296UL, 3643260763UL, 2163607277UL, 2595175979U
L, 3957721904UL, 215187467UL, 2369406909UL, 1929000706UL, 657446369UL, 3779
486126UL, 810692262UL, 1337974799UL, 4030350704UL, 1865557469UL, 1127012865
UL, 1026825278UL, 593628480UL, 1817383139UL, 3414354529UL, 361018423UL, 203
612980UL, 2102950440UL, 3407143936UL, 1739372987UL, 1475218277UL, 282758053
9UL, 305467658UL, 3292706746UL, 825045562UL, 1900092336UL, 3697343089UL, 76
0470915UL, 587414402UL, 2000637694UL, 343303227UL, 3891626781UL, 2757016765
UL, 1090707384UL, 4015377800UL, 745490961UL, 2855539022UL, 36229159UL, 1591
415533UL, 2208656873UL, 3920625546UL, 2614469314UL, 213338525UL, 886212578U
L, 2729976209UL, 827771411UL, 2430496777UL, 1198164420UL, 2644841698UL, 192
2667440UL, 2747674190UL, 1461695896UL, 1770331341UL, 1641501876UL, 35753531
1UL, 919857376UL, 3053795110UL, 3533531372UL, 3134337355UL, 1004072597UL, 2
01138876UL, 3221275220UL, 1589659138UL, 294382235UL, 1148950143UL, 42115797
07UL, 1799726917UL, 2838977761UL, 4172949119UL, 892664404UL, 416103844UL, 2
248389027UL, 1790015671UL, 3925738275UL, 2612357890UL, 3481887924UL, 161995
5951UL, 4188275966UL, 1672572975UL, 2005534713UL, 564854400UL, 747724021UL,
4037561738UL, 1605940213UL, 2620990454UL, 604900912UL, 610898209UL, 147324
4091UL, 735444769UL, 3571225334UL, 3036173307UL, 1032410949UL, 3672467167UL
, 68684525UL, 3327351604UL, 973022696UL, 519684794UL, 2291130512UL, 5030326
14UL, 3814902238UL, 1500301452UL, 2400186105UL, 3498787788UL, 422428426UL,
1756753750UL, 3923611748UL, 4120704752UL, 298972869UL, 1718342816UL, 652903
081UL, 2323881484UL, 1686027891UL, 4223212804UL, 1812334632UL, 1599640566UL
, 2109094458UL, 642431972UL, 2621896355UL, 2572404999UL, 1668409355UL, 1837
882537UL, 2949609717UL, 1392353032UL, 1733137169UL, 3691709793UL, 325234898
7UL, 1526077846UL, 451078217UL, 2591688848UL, 3353622601UL, 3971261781UL, 2
417951415UL, 1789383090UL, 1140183890UL, 4113853791UL, 2906966040UL, 346560
5863UL, 2793804971UL, 1054433135UL, 2195882948UL, 915518921UL, 3079297626UL
, 1793223686UL, 1055357758UL, 898713552UL, 791633499UL, 306802527UL, 275499
8083UL, 3179053982UL, 2469350088UL, 324402573UL, 2473418737UL, 3566888111UL
, 2832258993UL, 1377718274UL, 2168046398UL, 2097776414UL, 3852730296UL, 364
3260763UL, 3492388484UL, 2595175979UL, 3957721904UL, 215187467UL, 236940690
9UL, 4243449339UL, 657446369UL, 3779486126UL, 810692262UL, 1337974799UL, 39
60230785UL, 1865557469UL, 1127012865UL, 1026825278UL, 593628480UL, 73279331
2UL, 3414354529UL, 361018423UL, 203612980UL, 2102950440UL, 2401792405UL, 17
39372987UL, 1475218277UL, 2827580539UL, 305467658UL, 2454275289UL, 82504556
2UL, 1900092336UL, 3697343089UL, 760470915UL, 2146882409UL, 2000637694UL, 3
43303227UL, 3891626781UL, 2757016765UL, 3997473261UL, 4015377800UL, 7454909
61UL, 2855539022UL, 36229159UL, 2375394427UL, 2208656873UL, 3920625546UL, 2
614469314UL, 213338525UL, 2055366274UL, 2729976209UL, 827771411UL, 24304967
77UL, 1198164420UL, 1789631187UL, 1922667440UL, 2747674190UL, 1461695896UL,
1770331341UL, 4284442852UL, 357535311UL, 919857376UL, 3053795110UL, 353353
1372UL, 2124270060UL, 1004072597UL, 201138876UL, 3221275220UL, 1589659138UL
, 1418386120UL, 1148950143UL, 4211579707UL, 1799726917UL, 2838977761UL, 354
0708069UL, 892664404UL, 416103844UL, 2248389027UL, 1790015671UL, 3936883UL,
2612357890UL, 3481887924UL, 1619955951UL, 4188275966UL, 2963623483UL, 2005
534713UL, 564854400UL, 747724021UL, 4037561738UL, 3431155922UL, 2620990454U
L, 604900912UL, 610898209UL, 1473244091UL, 3880001339UL, 2879060316UL, 3300
897679UL, 3960972039UL, 3201086624UL, 3814462934UL, 3426650044UL, 193088163
2UL, 1981178788UL, 2956279691UL, 4272406256UL, 372705521UL, 1359389771UL, 1
590302979UL, 3940206208UL, 3817999127UL, 2527835456UL, 2739078164UL, 716997
849UL, 3235607043UL, 2550297745UL, 3688700200UL, 354502605UL, 2285793656UL,
2339138034UL, 3912354142UL, 2262255668UL, 469322622UL, 1319943359UL, 19161
01235UL, 200441823UL, 509436982UL, 2160284593UL, 1687919695UL, 4153615582UL
, 495735041UL, 3694469424UL, 2086893117UL, 4223008799UL, 105344742UL, 16980
33424UL, 1149223145UL, 4183918790UL, 4176151950UL, 415739351UL, 817762972UL
, 3768072560UL, 1931430949UL, 2698979439UL, 3481477932UL, 1994322914UL, 407
8299950UL, 1268233995UL, 3254069145UL, 91029129UL, 498234704UL, 1636613942U
L, 3710087092UL, 3876816560UL, 3510446387UL, 3870169008UL, 1370156410UL, 24
42498047UL, 2324396523UL, 1258730334UL, 621954739UL, 1053015373UL, 49182071
7UL, 3386515432UL, 2203703266UL, 120167176UL, 2383669740UL, 1038666440UL, 2
927342870UL, 3583197824UL, 1236241846UL, 2474675929UL, 679052891UL, 2451259
584UL, 2177706146UL, 606842882UL, 3546980104UL, 2289281509UL, 353873434UL,
2041926837UL, 1238346748UL, 2729109726UL, 2843938395UL, 2938124210UL, 25544
43866UL, 1494477920UL, 693378319UL, 2020963566UL, 2000385949UL, 3744098787U
L, 650307220UL, 2631327075UL, 1529128757UL, 595871428UL, 3206666562UL, 4580
62987UL, 875238192UL, 3729317374UL, 1368843921UL, 3478430230UL, 3234384578U
L, 3232435428UL, 321359326UL, 994274524UL, 361184397UL, 4285497594UL, 91526
3578UL, 1486882838UL, 9988613UL, 829077170UL, 677216046UL, 4141828204UL, 16
5804609UL, 1086678519UL, 2933434608UL, 1351662802UL, 2640085040UL, 26115029
32UL, 2033698714UL, 2008873254UL, 3995557835UL, 1020873906UL, 67873555UL, 2
230337823UL, 1263800417UL, 1148712155UL, 3985159589UL, 2979503513UL, 285471
4997UL, 1539343345UL, 2751484352UL, 1569100732UL, 2020758949UL, 2126757134U
L, 3426641899UL, 2808587825UL, 1953320148UL, 1096398464UL, 1502907172UL, 37
51230087UL, 765557661UL, 765290990UL, 3056075500UL, 2040620632UL, 422573751
UL, 3613558930UL, 1741145769UL, 273531216UL, 837238736UL, 494297893UL, 2903
251124UL, 1636782182UL, 4256592784UL, 3652746656UL, 4258393217UL, }, | |
| }; | | }; | |
| | | | |
| static unsigned int precalc_xorwow_matrix_host[8][800] = { | | static unsigned int precalc_xorwow_matrix_host[8][800] = { | |
| {850664906UL, 2293210629UL, 1517805917UL, 1215500405UL, 1612415445UL, 64538
8200UL, 824349799UL, 3517232886UL, 4075591755UL, 3089899292UL, 4249786064UL
, 3811424903UL, 1100783479UL, 53649761UL, 2817264826UL, 3159462529UL, 16548
48550UL, 950025444UL, 3095510002UL, 4080567211UL, 4111078399UL, 3241719305U
L, 2788212779UL, 4256963770UL, 2426893717UL, 4190211142UL, 1420776905UL, 37
80537969UL, 1102912875UL, 1657948873UL, 3354905256UL, 2519610308UL, 5157776
63UL, 3396785394UL, 1832603711UL, 1154211550UL, 1915690212UL, 1933919046UL,
789578337UL, 337961173UL, 1359089498UL, 2249086205UL, 3417955173UL, 862571
348UL, 528120760UL, 1265685672UL, 1970052076UL, 3585976752UL, 3645339918UL,
312171257UL, 1360991400UL, 1994321680UL, 2327168468UL, 2540437053UL, 11804
83641UL, 2217962701UL, 182726833UL, 590204372UL, 1904496495UL, 2545607041UL
, 3697978033UL, 1084030545UL, 3397906968UL, 2192325323UL, 2704204176UL, 106
9092002UL, 2364406907UL, 1578647245UL, 3561974633UL, 3437665426UL, 14641273
05UL, 1616628807UL, 2243114101UL, 3639967880UL, 1702613633UL, 2437350057UL,
39991274UL, 2024323584UL, 3795072940UL, 3604530798UL, 443099203UL, 6435362
12UL, 1919517328UL, 3931285769UL, 427935569UL, 276421624UL, 2492081750UL, 2
62729512UL, 3088549877UL, 2922650665UL, 1816283755UL, 4246096489UL, 8425759
14UL, 1460435650UL, 3050522190UL, 2640849794UL, 3697925816UL, 3465779075UL,
3856929655UL, 1365559780UL, 2897029415UL, 2747033756UL, 3611830629UL, 1891
542518UL, 1897590206UL, 437451803UL, 677924906UL, 123809117UL, 3940574372UL
, 687640291UL, 3488484529UL, 470218446UL, 1092571016UL, 1537938503UL, 10733
23937UL, 611300083UL, 3809285994UL, 3975678726UL, 925845389UL, 2514775760UL
, 2859302390UL, 2761919483UL, 993285307UL, 164095287UL, 3736193671UL, 20789
46336UL, 1418537059UL, 1202525920UL, 4234029440UL, 1313593624UL, 2484428922
UL, 1833969372UL, 661495122UL, 2217907395UL, 2795045321UL, 2950835531UL, 14
02379354UL, 351314168UL, 1902476749UL, 1914974334UL, 2873973176UL, 13212036
03UL, 3316118265UL, 3282193947UL, 1342191737UL, 793441242UL, 3281524559UL,
296088733UL, 487851702UL, 712098215UL, 1388727135UL, 1705533557UL, 35578002
92UL, 399729516UL, 1355829467UL, 291276309UL, 421164833UL, 1318404599UL, 20
64519128UL, 1161612642UL, 2076623594UL, 850664906UL, 2293210629UL, 15178059
17UL, 1215500405UL, 3847487204UL, 645388200UL, 824349799UL, 3517232886UL, 4
075591755UL, 2755872609UL, 4249786064UL, 3811424903UL, 1100783479UL, 536497
61UL, 1417544262UL, 3159462529UL, 1654848550UL, 950025444UL, 3095510002UL,
1908900347UL, 4111078399UL, 3241719305UL, 2788212779UL, 4256963770UL, 37502
58343UL, 4190211142UL, 1420776905UL, 3780537969UL, 1102912875UL, 1690550UL,
3354905256UL, 2519610308UL, 515777663UL, 3396785394UL, 2658162202UL, 11542
11550UL, 1915690212UL, 1933919046UL, 789578337UL, 189880016UL, 1359089498UL
, 2249086205UL, 3417955173UL, 862571348UL, 998719835UL, 1265685672UL, 19700
52076UL, 3585976752UL, 3645339918UL, 2973042959UL, 1360991400UL, 1994321680
UL, 2327168468UL, 2540437053UL, 2283905032UL, 2217962701UL, 182726833UL, 59
0204372UL, 1904496495UL, 110719262UL, 3697978033UL, 1084030545UL, 339790696
8UL, 2192325323UL, 4133333579UL, 1069092002UL, 2364406907UL, 1578647245UL,
3561974633UL, 3629845331UL, 1464127305UL, 1616628807UL, 2243114101UL, 36399
67880UL, 3256744141UL, 2437350057UL, 39991274UL, 2024323584UL, 3795072940UL
, 1024703328UL, 443099203UL, 643536212UL, 1919517328UL, 3931285769UL, 27551
67056UL, 276421624UL, 2492081750UL, 262729512UL, 3088549877UL, 2817867653UL
, 1816283755UL, 4246096489UL, 842575914UL, 1460435650UL, 2276077438UL, 2640
849794UL, 3697925816UL, 3465779075UL, 3856929655UL, 130551477UL, 2897029415
UL, 2747033756UL, 3611830629UL, 1891542518UL, 804565809UL, 437451803UL, 677
924906UL, 123809117UL, 3940574372UL, 2446610749UL, 3488484529UL, 470218446U
L, 1092571016UL, 1537938503UL, 1502147484UL, 611300083UL, 3809285994UL, 397
5678726UL, 925845389UL, 872826112UL, 2859302390UL, 2761919483UL, 993285307U
L, 164095287UL, 3901654538UL, 2078946336UL, 1418537059UL, 1202525920UL, 423
4029440UL, 704759480UL, 2484428922UL, 1833969372UL, 661495122UL, 2217907395
UL, 3287413716UL, 2950835531UL, 1402379354UL, 351314168UL, 1902476749UL, 20
33316109UL, 2873973176UL, 1321203603UL, 3316118265UL, 3282193947UL, 1316780
684UL, 793441242UL, 3281524559UL, 296088733UL, 487851702UL, 314311643UL, 13
88727135UL, 1705533557UL, 3557800292UL, 399729516UL, 1660074989UL, 29127630
9UL, 421164833UL, 1318404599UL, 2064519128UL, 3156334112UL, 2076623594UL, 8
50664906UL, 2293210629UL, 1517805917UL, 335452425UL, 3847487204UL, 64538820
0UL, 824349799UL, 3517232886UL, 954487767UL, 2755872609UL, 4249786064UL, 38
11424903UL, 1100783479UL, 3408594583UL, 1417544262UL, 3159462529UL, 1654848
550UL, 950025444UL, 324339737UL, 1908900347UL, 4111078399UL, 3241719305UL,
2788212779UL, 1890540205UL, 3750258343UL, 4190211142UL, 1420776905UL, 37805
37969UL, 3716648585UL, 1690550UL, 3354905256UL, 2519610308UL, 515777663UL,
3758156132UL, 2658162202UL, 1154211550UL, 1915690212UL, 1933919046UL, 84414
9171UL, 189880016UL, 1359089498UL, 2249086205UL, 3417955173UL, 1031812215UL
, 998719835UL, 1265685672UL, 1970052076UL, 3585976752UL, 3174204115UL, 2973
042959UL, 1360991400UL, 1994321680UL, 2327168468UL, 714016907UL, 2283905032
UL, 2217962701UL, 182726833UL, 590204372UL, 2151450260UL, 110719262UL, 3697
978033UL, 1084030545UL, 3397906968UL, 767772303UL, 4133333579UL, 1069092002
UL, 2364406907UL, 1578647245UL, 42955292UL, 3629845331UL, 1464127305UL, 161
6628807UL, 2243114101UL, 3222189776UL, 3256744141UL, 2437350057UL, 39991274
UL, 2024323584UL, 3142424684UL, 1024703328UL, 443099203UL, 643536212UL, 191
9517328UL, 918511196UL, 2755167056UL, 276421624UL, 2492081750UL, 262729512U
L, 4246877536UL, 2817867653UL, 1816283755UL, 4246096489UL, 842575914UL, 142
5765936UL, 2276077438UL, 2640849794UL, 3697925816UL, 3465779075UL, 14917025
26UL, 130551477UL, 2897029415UL, 2747033756UL, 3611830629UL, 1844578694UL,
804565809UL, 437451803UL, 677924906UL, 123809117UL, 3419189841UL, 244661074
9UL, 3488484529UL, 470218446UL, 1092571016UL, 3272535988UL, 1502147484UL, 6
11300083UL, 3809285994UL, 3975678726UL, 2853681168UL, 872826112UL, 28593023
90UL, 2761919483UL, 993285307UL, 1434560128UL, 3901654538UL, 2078946336UL,
1418537059UL, 1202525920UL, 2530097881UL, 704759480UL, 2484428922UL, 183396
9372UL, 661495122UL, 503878844UL, 3287413716UL, 2950835531UL, 1402379354UL,
351314168UL, 4131886119UL, 2033316109UL, 2873973176UL, 1321203603UL, 33161
18265UL, 237900321UL, 1316780684UL, 793441242UL, 3281524559UL, 296088733UL,
1730738847UL, 314311643UL, 1388727135UL, 1705533557UL, 3557800292UL, 15538
35665UL, 1660074989UL, 291276309UL, 421164833UL, 1318404599UL, 964731488UL,
3156334112UL, 2076623594UL, 850664906UL, 2293210629UL, 1105350579UL, 33545
2425UL, 3847487204UL, 645388200UL, 824349799UL, 2789953706UL, 954487767UL,
2755872609UL, 4249786064UL, 3811424903UL, 3937839949UL, 3408594583UL, 14175
44262UL, 3159462529UL, 1654848550UL, 624060530UL, 324339737UL, 1908900347UL
, 4111078399UL, 3241719305UL, 2294919498UL, 1890540205UL, 3750258343UL, 419
0211142UL, 1420776905UL, 2279133729UL, 3716648585UL, 1690550UL, 3354905256U
L, 2519610308UL, 3563975602UL, 3758156132UL, 2658162202UL, 1154211550UL, 19
15690212UL, 3505586122UL, 844149171UL, 189880016UL, 1359089498UL, 224908620
5UL, 2389487504UL, 1031812215UL, 998719835UL, 1265685672UL, 1970052076UL, 2
798611919UL, 3174204115UL, 2973042959UL, 1360991400UL, 1994321680UL, 168413
4678UL, 714016907UL, 2283905032UL, 2217962701UL, 182726833UL, 1734988742UL,
2151450260UL, 110719262UL, 3697978033UL, 1084030545UL, 159906818UL, 767772
303UL, 4133333579UL, 1069092002UL, 2364406907UL, 1290801202UL, 42955292UL,
3629845331UL, 1464127305UL, 1616628807UL, 987794861UL, 3222189776UL, 325674
4141UL, 2437350057UL, 39991274UL, 3644076751UL, 3142424684UL, 1024703328UL,
443099203UL, 643536212UL, 1487589384UL, 918511196UL, 2755167056UL, 2764216
24UL, 2492081750UL, 137688638UL, 4246877536UL, 2817867653UL, 1816283755UL,
4246096489UL, 1518475380UL, 1425765936UL, 2276077438UL, 2640849794UL, 36979
25816UL, 4226506771UL, 1491702526UL, 130551477UL, 2897029415UL, 2747033756U
L, 2033599579UL, 1844578694UL, 804565809UL, 437451803UL, 677924906UL, 27490
65512UL, 3419189841UL, 2446610749UL, 3488484529UL, 470218446UL, 290444026UL
, 3272535988UL, 1502147484UL, 611300083UL, 3809285994UL, 2546040767UL, 2853
681168UL, 872826112UL, 2859302390UL, 2761919483UL, 4097961150UL, 1434560128
UL, 3901654538UL, 2078946336UL, 1418537059UL, 2725734455UL, 2530097881UL, 7
04759480UL, 2484428922UL, 1833969372UL, 3999408333UL, 503878844UL, 32874137
16UL, 2950835531UL, 1402379354UL, 3861442503UL, 4131886119UL, 2033316109UL,
2873973176UL, 1321203603UL, 1267331405UL, 237900321UL, 1316780684UL, 79344
1242UL, 3281524559UL, 1273427916UL, 1730738847UL, 314311643UL, 1388727135UL
, 1705533557UL, 1474310231UL, 1553835665UL, 1660074989UL, 291276309UL, 4211
64833UL, 3884815658UL, 3088049345UL, 3307042227UL, 3228948601UL, 1717605083
UL, 1864502063UL, 3799516572UL, 2372822470UL, 2691586476UL, 1172840854UL, 1
577099080UL, 870101866UL, 2139291021UL, 406996656UL, 255568268UL, 897760202
UL, 674745664UL, 885214361UL, 3753233375UL, 3015215223UL, 1711461259UL, 324
1363282UL, 2125360928UL, 2493601640UL, 2350228245UL, 3434627328UL, 20956429
63UL, 3360932494UL, 3287396242UL, 4070512427UL, 3415702664UL, 1958354224UL,
3280206940UL, 3929504236UL, 3390499817UL, 4144225735UL, 3621750606UL, 3205
006592UL, 3495743785UL, 269239326UL, 2181299371UL, 2898796651UL, 2613623219
UL, 3988711298UL, 2162437858UL, 949553433UL, 3289670000UL, 3559525307UL, 33
66925567UL, 2112148665UL, 955626393UL, 1790865381UL, 699223558UL, 388958430
1UL, 1020750250UL, 4105283899UL, 2295851818UL, 4045668915UL, 2224770025UL,
766386910UL, 4265157386UL, 89139307UL, 2099710177UL, 1012450874UL, 18754924
46UL, 1927399417UL, 767450812UL, 654474783UL, 4265293038UL, 4041215389UL, 4
102336947UL, 4263617328UL, 2135826340UL, 2317231535UL, 3773895729UL, 403151
111UL, 1400693138UL, 4255050194UL, 755369466UL, 2325764302UL, 2617301159UL,
4165707294UL, 1206304709UL, 2415645397UL, 4276004841UL, 1457022279UL, 6626
60652UL, 795140282UL, 828519889UL, 805830562UL, 1179976369UL, 2212548232UL,
755708248UL, 1034682071UL, 899950902UL, 1906046264UL, 1861009040UL, 310711
525UL, 920739741UL, 2322414272UL, 3179236470UL, 81822135UL, 4111390320UL, 1
800166783UL, 112253014UL, 688771939UL, 1050990794UL, 3124647483UL, 28705217
1UL, 1363630156UL, 3447798279UL, 1405733552UL, 3075862538UL, 1682808202UL,
1595154222UL, 1173705692UL, 680713285UL, 2748212230UL, 568610527UL, 3434965
538UL, 1114942930UL, 2835858745UL, 2575992250UL, 3243355150UL, 2127580225UL
, 1855934450UL, 3915941751UL, 2228679809UL, 1514780124UL, 1506688039UL, 103
3083295UL, 793807083UL, 1120681149UL, 4105670165UL, 3999570340UL, 208302013
1UL, 1213356023UL, 3684882757UL, 3375797774UL, 3577986103UL, 2092046164UL,
2593847443UL, 1826450612UL, 367828409UL, 3198272513UL, 1941316667UL, 943707
510UL, 907134807UL, 2020457947UL, 1462193665UL, 2964617539UL, 4216491663UL,
2625270800UL, 2395371467UL, 3691003028UL, 3659016793UL, 2381847054UL, 3513
105567UL, 3013019506UL, 2731245927UL, }, {1680024716UL, 2112340059UL, 33874
75367UL, 2080916186UL, 1431532386UL, 3907378472UL, 2636491350UL, 2176128529
UL, 2236616671UL, 3736851460UL, 2604001339UL, 3893075234UL, 3495918635UL, 4
116370522UL, 1384310379UL, 3660102574UL, 2030233939UL, 2759207091UL, 493479
23UL, 97526506UL, 2566932710UL, 1566181275UL, 3127827248UL, 578401670UL, 14
99229308UL, 2581732444UL, 279715551UL, 809690877UL, 1438444015UL, 878935323
UL, 1495277039UL, 3417305339UL, 2858903785UL, 3074075088UL, 603749086UL, 23
70669734UL, 391683868UL, 3933465331UL, 2884128106UL, 1478317876UL, 18649883
35UL, 2925823809UL, 4133578805UL, 218104493UL, 368652174UL, 1998600344UL, 1
109346044UL, 1716435313UL, 415435111UL, 91393686UL, 2536620737UL, 144006857
3UL, 481874870UL, 142128108UL, 988825519UL, 2077118779UL, 2858045339UL, 406
8162251UL, 115593872UL, 1364244587UL, 3550167006UL, 3728768059UL, 177242368
5UL, 2504624145UL, 248732306UL, 1412607307UL, 4081166331UL, 154438218UL, 16
52901877UL, 3932533490UL, 3142799969UL, 3154073676UL, 3112018078UL, 2757873
595UL, 2364830126UL, 2855791484UL, 793851407UL, 507785167UL, 263713916UL, 4
060700051UL, 3291978358UL, 1584226715UL, 2546417990UL, 450747961UL, 2951067
700UL, 2706009093UL, 1788578194UL, 4030171132UL, 2610979903UL, 573420740UL,
4269115622UL, 2180305819UL, 2646894726UL, 716649335UL, 3875715683UL, 85342
8184UL, 2436760738UL, 4190071217UL, 2754423535UL, 540698101UL, 4082489821UL
, 741976046UL, 267559495UL, 1591532642UL, 2500610323UL, 3203248679UL, 14731
2102UL, 2772368222UL, 1412987047UL, 2295185573UL, 1932341300UL, 898396308UL
, 1837129999UL, 3113914292UL, 2613354524UL, 3141601915UL, 276087167UL, 1887
389351UL, 757801450UL, 3752353732UL, 2745818074UL, 1442953464UL, 3802648347
UL, 223728071UL, 2169947402UL, 1338125300UL, 3642174036UL, 2794462634UL, 23
26349851UL, 862746036UL, 3577092599UL, 627103363UL, 552173564UL, 4142604459
UL, 2310329406UL, 583522272UL, 189323282UL, 1217612313UL, 73550248UL, 24346
92829UL, 2757269706UL, 2392210091UL, 3032922600UL, 3573904125UL, 2897178037
UL, 2632631469UL, 3085332665UL, 3775619904UL, 2563291734UL, 1351375865UL, 4
043427793UL, 1803743084UL, 3112116579UL, 522940594UL, 2690374983UL, 2613871
529UL, 3810037031UL, 1765642390UL, 534554747UL, 1930852049UL, 2264349344UL,
1680024716UL, 2112340059UL, 3387475367UL, 2080916186UL, 75966494UL, 390737
8472UL, 2636491350UL, 2176128529UL, 2236616671UL, 2372987046UL, 2604001339U
L, 3893075234UL, 3495918635UL, 4116370522UL, 534929913UL, 3660102574UL, 203
0233939UL, 2759207091UL, 49347923UL, 987575186UL, 2566932710UL, 1566181275U
L, 3127827248UL, 578401670UL, 3731513754UL, 2581732444UL, 279715551UL, 8096
90877UL, 1438444015UL, 2185866850UL, 1495277039UL, 3417305339UL, 2858903785
UL, 3074075088UL, 4198538376UL, 2370669734UL, 391683868UL, 3933465331UL, 28
84128106UL, 1400216510UL, 1864988335UL, 2925823809UL, 4133578805UL, 2181044
93UL, 2798390374UL, 1998600344UL, 1109346044UL, 1716435313UL, 415435111UL,
1892535124UL, 2536620737UL, 1440068573UL, 481874870UL, 142128108UL, 3290827
40UL, 2077118779UL, 2858045339UL, 4068162251UL, 115593872UL, 2644000449UL,
3550167006UL, 3728768059UL, 1772423685UL, 2504624145UL, 2140118619UL, 14126
07307UL, 4081166331UL, 154438218UL, 1652901877UL, 3804911318UL, 3142799969U
L, 3154073676UL, 3112018078UL, 2757873595UL, 50297646UL, 2855791484UL, 7938
51407UL, 507785167UL, 263713916UL, 3324588195UL, 3291978358UL, 1584226715UL
, 2546417990UL, 450747961UL, 3455625012UL, 2706009093UL, 1788578194UL, 4030
171132UL, 2610979903UL, 3835380965UL, 4269115622UL, 2180305819UL, 264689472
6UL, 716649335UL, 2607142354UL, 853428184UL, 2436760738UL, 4190071217UL, 27
54423535UL, 456808691UL, 4082489821UL, 741976046UL, 267559495UL, 1591532642
UL, 2722205042UL, 3203248679UL, 147312102UL, 2772368222UL, 1412987047UL, 19
50543946UL, 1932341300UL, 898396308UL, 1837129999UL, 3113914292UL, 42861639
2UL, 3141601915UL, 276087167UL, 1887389351UL, 757801450UL, 963534966UL, 274
5818074UL, 1442953464UL, 3802648347UL, 223728071UL, 229039300UL, 1338125300
UL, 3642174036UL, 2794462634UL, 2326349851UL, 206115203UL, 3577092599UL, 62
7103363UL, 552173564UL, 4142604459UL, 1492461846UL, 583522272UL, 189323282U
L, 1217612313UL, 73550248UL, 3552211807UL, 2757269706UL, 2392210091UL, 3032
922600UL, 3573904125UL, 810640644UL, 2632631469UL, 3085332665UL, 3775619904
UL, 2563291734UL, 922608790UL, 4043427793UL, 1803743084UL, 3112116579UL, 52
2940594UL, 1785093944UL, 2613871529UL, 3810037031UL, 1765642390UL, 53455474
7UL, 3528050076UL, 2264349344UL, 1680024716UL, 2112340059UL, 3387475367UL,
3295682653UL, 75966494UL, 3907378472UL, 2636491350UL, 2176128529UL, 3574915
532UL, 2372987046UL, 2604001339UL, 3893075234UL, 3495918635UL, 1280296085UL
, 534929913UL, 3660102574UL, 2030233939UL, 2759207091UL, 299776535UL, 98757
5186UL, 2566932710UL, 1566181275UL, 3127827248UL, 3874691533UL, 3731513754U
L, 2581732444UL, 279715551UL, 809690877UL, 3100791084UL, 2185866850UL, 1495
277039UL, 3417305339UL, 2858903785UL, 1310351481UL, 4198538376UL, 237066973
4UL, 391683868UL, 3933465331UL, 2749085130UL, 1400216510UL, 1864988335UL, 2
925823809UL, 4133578805UL, 3352814594UL, 2798390374UL, 1998600344UL, 110934
6044UL, 1716435313UL, 1571752941UL, 1892535124UL, 2536620737UL, 1440068573U
L, 481874870UL, 2485033697UL, 329082740UL, 2077118779UL, 2858045339UL, 4068
162251UL, 3837440666UL, 2644000449UL, 3550167006UL, 3728768059UL, 177242368
5UL, 1176559812UL, 2140118619UL, 1412607307UL, 4081166331UL, 154438218UL, 2
902622972UL, 3804911318UL, 3142799969UL, 3154073676UL, 3112018078UL, 240339
1233UL, 50297646UL, 2855791484UL, 793851407UL, 507785167UL, 2351826747UL, 3
324588195UL, 3291978358UL, 1584226715UL, 2546417990UL, 746876926UL, 3455625
012UL, 2706009093UL, 1788578194UL, 4030171132UL, 3779307353UL, 3835380965UL
, 4269115622UL, 2180305819UL, 2646894726UL, 2602235234UL, 2607142354UL, 853
428184UL, 2436760738UL, 4190071217UL, 2066757692UL, 456808691UL, 4082489821
UL, 741976046UL, 267559495UL, 3001080633UL, 2722205042UL, 3203248679UL, 147
312102UL, 2772368222UL, 89950260UL, 1950543946UL, 1932341300UL, 898396308UL
, 1837129999UL, 947911286UL, 428616392UL, 3141601915UL, 276087167UL, 188738
9351UL, 2583987247UL, 963534966UL, 2745818074UL, 1442953464UL, 3802648347UL
, 4229124441UL, 229039300UL, 1338125300UL, 3642174036UL, 2794462634UL, 2472
155633UL, 206115203UL, 3577092599UL, 627103363UL, 552173564UL, 2586882739UL
, 1492461846UL, 583522272UL, 189323282UL, 1217612313UL, 3501549884UL, 35522
11807UL, 2757269706UL, 2392210091UL, 3032922600UL, 740675778UL, 810640644UL
, 2632631469UL, 3085332665UL, 3775619904UL, 3643289881UL, 922608790UL, 4043
427793UL, 1803743084UL, 3112116579UL, 2213337398UL, 1785093944UL, 261387152
9UL, 3810037031UL, 1765642390UL, 762472016UL, 3528050076UL, 2264349344UL, 1
680024716UL, 2112340059UL, 1372272974UL, 3295682653UL, 75966494UL, 39073784
72UL, 2636491350UL, 3117471955UL, 3574915532UL, 2372987046UL, 2604001339UL,
3893075234UL, 915576383UL, 1280296085UL, 534929913UL, 3660102574UL, 203023
3939UL, 346368350UL, 299776535UL, 987575186UL, 2566932710UL, 1566181275UL,
3535223896UL, 3874691533UL, 3731513754UL, 2581732444UL, 279715551UL, 245689
4951UL, 3100791084UL, 2185866850UL, 1495277039UL, 3417305339UL, 1618871086U
L, 1310351481UL, 4198538376UL, 2370669734UL, 391683868UL, 2009676005UL, 274
9085130UL, 1400216510UL, 1864988335UL, 2925823809UL, 58955107UL, 3352814594
UL, 2798390374UL, 1998600344UL, 1109346044UL, 3273979614UL, 1571752941UL, 1
892535124UL, 2536620737UL, 1440068573UL, 1174168447UL, 2485033697UL, 329082
740UL, 2077118779UL, 2858045339UL, 4062921629UL, 3837440666UL, 2644000449UL
, 3550167006UL, 3728768059UL, 2642133401UL, 1176559812UL, 2140118619UL, 141
2607307UL, 4081166331UL, 3124905304UL, 2902622972UL, 3804911318UL, 31427999
69UL, 3154073676UL, 1449454613UL, 2403391233UL, 50297646UL, 2855791484UL, 7
93851407UL, 3514201526UL, 2351826747UL, 3324588195UL, 3291978358UL, 1584226
715UL, 3636681672UL, 746876926UL, 3455625012UL, 2706009093UL, 1788578194UL,
3451519459UL, 3779307353UL, 3835380965UL, 4269115622UL, 2180305819UL, 3987
989524UL, 2602235234UL, 2607142354UL, 853428184UL, 2436760738UL, 2151617107
UL, 2066757692UL, 456808691UL, 4082489821UL, 741976046UL, 3590081269UL, 300
1080633UL, 2722205042UL, 3203248679UL, 147312102UL, 3432947806UL, 89950260U
L, 1950543946UL, 1932341300UL, 898396308UL, 3828432864UL, 947911286UL, 4286
16392UL, 3141601915UL, 276087167UL, 2517666433UL, 2583987247UL, 963534966UL
, 2745818074UL, 1442953464UL, 2223986807UL, 4229124441UL, 229039300UL, 1338
125300UL, 3642174036UL, 1053796945UL, 2472155633UL, 206115203UL, 3577092599
UL, 627103363UL, 1113276084UL, 2586882739UL, 1492461846UL, 583522272UL, 189
323282UL, 1490604990UL, 3501549884UL, 3552211807UL, 2757269706UL, 239221009
1UL, 3545407532UL, 740675778UL, 810640644UL, 2632631469UL, 3085332665UL, 75
5862267UL, 3643289881UL, 922608790UL, 4043427793UL, 1803743084UL, 195416663
0UL, 2213337398UL, 1785093944UL, 2613871529UL, 3810037031UL, 3042935707UL,
3162182177UL, 2791346436UL, 1901925289UL, 863100941UL, 3367519168UL, 197262
3238UL, 3664303070UL, 604922059UL, 3026817982UL, 1436412310UL, 4096180631UL
, 1597561857UL, 4206212303UL, 4127914332UL, 3228677359UL, 3985733659UL, 359
7290113UL, 4251197894UL, 3451370603UL, 609679338UL, 3360835257UL, 137223988
5UL, 638572328UL, 3806422284UL, 3974147336UL, 1804280837UL, 4209089291UL, 2
021797469UL, 3557188838UL, 409727186UL, 2114649178UL, 687702120UL, 25424459
92UL, 1235991799UL, 460479179UL, 2008348175UL, 887884478UL, 3942327811UL, 2
999928223UL, 4171339789UL, 2286339235UL, 1293442231UL, 1575942850UL, 761224
75UL, 1440527701UL, 2006558403UL, 1544148172UL, 895899367UL, 681826913UL, 4
094701935UL, 3995413790UL, 1027509154UL, 2264990896UL, 1938238113UL, 213430
250UL, 222469320UL, 609726517UL, 3581538106UL, 492802663UL, 120480843UL, 17
20004062UL, 1132674507UL, 911082758UL, 2909148131UL, 566658805UL, 396411444
5UL, 3483602509UL, 1793438750UL, 165562604UL, 3641830063UL, 2394205521UL, 3
404874822UL, 1672998096UL, 916151953UL, 1141264477UL, 3171661340UL, 3803396
219UL, 3018337382UL, 1863902683UL, 2474641928UL, 3250365071UL, 3897886220UL
, 1219701051UL, 51332576UL, 1358614881UL, 1707407492UL, 3670647816UL, 92335
7625UL, 343687395UL, 3991339686UL, 3913575403UL, 1267727936UL, 4001357856UL
, 3820224848UL, 2942896724UL, 3505936742UL, 1403285299UL, 1992762049UL, 567
748449UL, 2202721585UL, 2781324216UL, 1724850068UL, 2408314541UL, 307397581
3UL, 3992810029UL, 2475242354UL, 540562053UL, 2185198943UL, 3759352041UL, 3
373885614UL, 1132999410UL, 1097554565UL, 4089342358UL, 3239542922UL, 245174
8646UL, 407290679UL, 3188103200UL, 1708016248UL, 26848241UL, 2796711130UL,
3090711568UL, 4068389322UL, 3420916085UL, 3137567033UL, 2877819818UL, 22133
454UL, 4629160UL, 3703695249UL, 1920151708UL, 1175452162UL, 130015299UL, 33
31834713UL, 1099225384UL, 689254331UL, 1851083761UL, 2654970209UL, 32592979
36UL, 3742819314UL, 3524284766UL, 2291819083UL, 3494031861UL, 16242889UL, 3
545082774UL, 1997878108UL, 777447699UL, 4244916543UL, 3508640253UL, 3782278
393UL, 2107258964UL, 2139074576UL, 1383217899UL, 2337934322UL, 3181899620UL
, 1285955765UL, 2989610020UL, 3326862146UL, 1168587380UL, 801203532UL, 3020
809957UL, }, {3810471203UL, 1017064446UL, 1595207573UL, 441087832UL, 332674
6890UL, 3294064431UL, 167972517UL, 3625210015UL, 1011845006UL, 2980240819UL
, 1778354660UL, 3041730987UL, 1598611350UL, 2015169745UL, 2321724978UL, 339
0812967UL, 2432904511UL, 113261909UL, 3957193232UL, 3806115908UL, 296582892
9UL, 2035392295UL, 3500116619UL, 2881232416UL, 1672212265UL, 1607201428UL,
425148945UL, 1262591961UL, 2221781268UL, 4215047456UL, 2148245850UL, 278748
8981UL, 1077262192UL, 2085467561UL, 3053954888UL, 3584435116UL, 3013084787U
L, 287099941UL, 1290407232UL, 4078552287UL, 2658945475UL, 4251530898UL, 240
3086478UL, 2884923598UL, 3545110453UL, 4105390090UL, 343200643UL, 318988882
1UL, 4086304363UL, 3466483195UL, 259435633UL, 2846377387UL, 497258846UL, 27
2775541UL, 985737911UL, 2957688879UL, 2180784344UL, 3434619542UL, 364338483
8UL, 2228652440UL, 3107480718UL, 2208729807UL, 596436263UL, 3255120711UL, 3
248886970UL, 519242965UL, 602979109UL, 1619614UL, 1391563565UL, 56262588UL,
1584463910UL, 1849038201UL, 728022295UL, 848624947UL, 1813827408UL, 428214
945UL, 1246345586UL, 4213351865UL, 168985863UL, 456608054UL, 4277869380UL,
3886828599UL, 2264054549UL, 3110967170UL, 3138175314UL, 2649164828UL, 33693
78320UL, 3648350039UL, 3524848759UL, 1468470706UL, 3558859222UL, 2669673235
UL, 831851874UL, 4285651092UL, 4224147373UL, 1088456706UL, 231954609UL, 311
8005852UL, 225508069UL, 883105389UL, 856371341UL, 2001356578UL, 639336670UL
, 2363501707UL, 3622399552UL, 4024065226UL, 1093546838UL, 4263608561UL, 185
2072422UL, 425195042UL, 2441102396UL, 296426333UL, 384641750UL, 3559334435U
L, 1757327033UL, 1016016207UL, 3595686646UL, 24777793UL, 623926105UL, 21691
95923UL, 1779396793UL, 646997837UL, 1459728476UL, 2644865980UL, 1994581089U
L, 3956278544UL, 919592580UL, 2153558858UL, 2029633394UL, 3837501009UL, 401
6560170UL, 484838096UL, 3652199054UL, 1971790561UL, 605295089UL, 637470291U
L, 278970544UL, 3574824693UL, 295866521UL, 1755035156UL, 2542341803UL, 1588
716357UL, 1502596918UL, 4124554133UL, 3547049843UL, 1768033045UL, 153173463
0UL, 101448323UL, 3233017580UL, 1793222944UL, 3187853500UL, 186000900UL, 80
3444571UL, 2820254958UL, 2009384608UL, 2384668855UL, 2222812920UL, 63360866
5UL, 2028480056UL, 1258028235UL, 545095949UL, 3810471203UL, 1017064446UL, 1
595207573UL, 441087832UL, 899068662UL, 3294064431UL, 167972517UL, 362521001
5UL, 1011845006UL, 3951305793UL, 1778354660UL, 3041730987UL, 1598611350UL,
2015169745UL, 1885149424UL, 3390812967UL, 2432904511UL, 113261909UL, 395719
3232UL, 3953443155UL, 2965828929UL, 2035392295UL, 3500116619UL, 2881232416U
L, 329153573UL, 1607201428UL, 425148945UL, 1262591961UL, 2221781268UL, 7802
8761UL, 2148245850UL, 2787488981UL, 1077262192UL, 2085467561UL, 647235899UL
, 3584435116UL, 3013084787UL, 287099941UL, 1290407232UL, 1467385694UL, 2658
945475UL, 4251530898UL, 2403086478UL, 2884923598UL, 3489351040UL, 410539009
0UL, 343200643UL, 3189888821UL, 4086304363UL, 3521512280UL, 259435633UL, 28
46377387UL, 497258846UL, 272775541UL, 1367093111UL, 2957688879UL, 218078434
4UL, 3434619542UL, 3643384838UL, 411877686UL, 3107480718UL, 2208729807UL, 5
96436263UL, 3255120711UL, 584605030UL, 519242965UL, 602979109UL, 1619614UL,
1391563565UL, 3902518209UL, 1584463910UL, 1849038201UL, 728022295UL, 84862
4947UL, 1932969318UL, 428214945UL, 1246345586UL, 4213351865UL, 168985863UL,
2770345237UL, 4277869380UL, 3886828599UL, 2264054549UL, 3110967170UL, 2953
581033UL, 2649164828UL, 3369378320UL, 3648350039UL, 3524848759UL, 238035397
7UL, 3558859222UL, 2669673235UL, 831851874UL, 4285651092UL, 1214052447UL, 1
088456706UL, 231954609UL, 3118005852UL, 225508069UL, 1766983646UL, 85637134
1UL, 2001356578UL, 639336670UL, 2363501707UL, 1782816591UL, 4024065226UL, 1
093546838UL, 4263608561UL, 1852072422UL, 1149716600UL, 2441102396UL, 296426
333UL, 384641750UL, 3559334435UL, 2391309970UL, 1016016207UL, 3595686646UL,
24777793UL, 623926105UL, 362098678UL, 1779396793UL, 646997837UL, 145972847
6UL, 2644865980UL, 3238673748UL, 3956278544UL, 919592580UL, 2153558858UL, 2
029633394UL, 115778559UL, 4016560170UL, 484838096UL, 3652199054UL, 19717905
61UL, 737357475UL, 637470291UL, 278970544UL, 3574824693UL, 295866521UL, 398
9745853UL, 2542341803UL, 1588716357UL, 1502596918UL, 4124554133UL, 30168497
44UL, 1768033045UL, 1531734630UL, 101448323UL, 3233017580UL, 4157527581UL,
3187853500UL, 186000900UL, 803444571UL, 2820254958UL, 1980528062UL, 2384668
855UL, 2222812920UL, 633608665UL, 2028480056UL, 3166710281UL, 545095949UL,
3810471203UL, 1017064446UL, 1595207573UL, 693962828UL, 899068662UL, 3294064
431UL, 167972517UL, 3625210015UL, 1486040398UL, 3951305793UL, 1778354660UL,
3041730987UL, 1598611350UL, 2859363132UL, 1885149424UL, 3390812967UL, 2432
904511UL, 113261909UL, 664880478UL, 3953443155UL, 2965828929UL, 2035392295U
L, 3500116619UL, 558081801UL, 329153573UL, 1607201428UL, 425148945UL, 12625
91961UL, 3716247699UL, 78028761UL, 2148245850UL, 2787488981UL, 1077262192UL
, 4206362947UL, 647235899UL, 3584435116UL, 3013084787UL, 287099941UL, 25367
81098UL, 1467385694UL, 2658945475UL, 4251530898UL, 2403086478UL, 3075072413
UL, 3489351040UL, 4105390090UL, 343200643UL, 3189888821UL, 2540485172UL, 35
21512280UL, 259435633UL, 2846377387UL, 497258846UL, 2442427327UL, 136709311
1UL, 2957688879UL, 2180784344UL, 3434619542UL, 1593967423UL, 411877686UL, 3
107480718UL, 2208729807UL, 596436263UL, 1048686529UL, 584605030UL, 51924296
5UL, 602979109UL, 1619614UL, 2072745381UL, 3902518209UL, 1584463910UL, 1849
038201UL, 728022295UL, 846033949UL, 1932969318UL, 428214945UL, 1246345586UL
, 4213351865UL, 1066373275UL, 2770345237UL, 4277869380UL, 3886828599UL, 226
4054549UL, 1877859690UL, 2953581033UL, 2649164828UL, 3369378320UL, 36483500
39UL, 2537763389UL, 2380353977UL, 3558859222UL, 2669673235UL, 831851874UL,
522748140UL, 1214052447UL, 1088456706UL, 231954609UL, 3118005852UL, 1381269
315UL, 1766983646UL, 856371341UL, 2001356578UL, 639336670UL, 667275675UL, 1
782816591UL, 4024065226UL, 1093546838UL, 4263608561UL, 2057337961UL, 114971
6600UL, 2441102396UL, 296426333UL, 384641750UL, 340523210UL, 2391309970UL,
1016016207UL, 3595686646UL, 24777793UL, 3094832341UL, 362098678UL, 17793967
93UL, 646997837UL, 1459728476UL, 1169681568UL, 3238673748UL, 3956278544UL,
919592580UL, 2153558858UL, 388335108UL, 115778559UL, 4016560170UL, 48483809
6UL, 3652199054UL, 1764858181UL, 737357475UL, 637470291UL, 278970544UL, 357
4824693UL, 3671458900UL, 3989745853UL, 2542341803UL, 1588716357UL, 15025969
18UL, 2102871406UL, 3016849744UL, 1768033045UL, 1531734630UL, 101448323UL,
3964942332UL, 4157527581UL, 3187853500UL, 186000900UL, 803444571UL, 3425652
083UL, 1980528062UL, 2384668855UL, 2222812920UL, 633608665UL, 3035373876UL,
3166710281UL, 545095949UL, 3810471203UL, 1017064446UL, 669282349UL, 693962
828UL, 899068662UL, 3294064431UL, 167972517UL, 2007256988UL, 1486040398UL,
3951305793UL, 1778354660UL, 3041730987UL, 2827768941UL, 2859363132UL, 18851
49424UL, 3390812967UL, 2432904511UL, 3700915653UL, 664880478UL, 3953443155U
L, 2965828929UL, 2035392295UL, 1461208330UL, 558081801UL, 329153573UL, 1607
201428UL, 425148945UL, 1700881129UL, 3716247699UL, 78028761UL, 2148245850UL
, 2787488981UL, 2706775080UL, 4206362947UL, 647235899UL, 3584435116UL, 3013
084787UL, 2958545221UL, 2536781098UL, 1467385694UL, 2658945475UL, 425153089
8UL, 2241012567UL, 3075072413UL, 3489351040UL, 4105390090UL, 343200643UL, 4
90164649UL, 2540485172UL, 3521512280UL, 259435633UL, 2846377387UL, 40736118
31UL, 2442427327UL, 1367093111UL, 2957688879UL, 2180784344UL, 1835510773UL,
1593967423UL, 411877686UL, 3107480718UL, 2208729807UL, 3306732468UL, 10486
86529UL, 584605030UL, 519242965UL, 602979109UL, 2978864605UL, 2072745381UL,
3902518209UL, 1584463910UL, 1849038201UL, 3284115169UL, 846033949UL, 19329
69318UL, 428214945UL, 1246345586UL, 194166002UL, 1066373275UL, 2770345237UL
, 4277869380UL, 3886828599UL, 1874087886UL, 1877859690UL, 2953581033UL, 264
9164828UL, 3369378320UL, 4145454028UL, 2537763389UL, 2380353977UL, 35588592
22UL, 2669673235UL, 739345884UL, 522748140UL, 1214052447UL, 1088456706UL, 2
31954609UL, 3605603781UL, 1381269315UL, 1766983646UL, 856371341UL, 20013565
78UL, 2049940324UL, 667275675UL, 1782816591UL, 4024065226UL, 1093546838UL,
152524382UL, 2057337961UL, 1149716600UL, 2441102396UL, 296426333UL, 3195130
788UL, 340523210UL, 2391309970UL, 1016016207UL, 3595686646UL, 180492441UL,
3094832341UL, 362098678UL, 1779396793UL, 646997837UL, 2458167607UL, 1169681
568UL, 3238673748UL, 3956278544UL, 919592580UL, 3421005218UL, 388335108UL,
115778559UL, 4016560170UL, 484838096UL, 2649676374UL, 1764858181UL, 7373574
75UL, 637470291UL, 278970544UL, 2236401278UL, 3671458900UL, 3989745853UL, 2
542341803UL, 1588716357UL, 1241570134UL, 2102871406UL, 3016849744UL, 176803
3045UL, 1531734630UL, 1765654724UL, 3964942332UL, 4157527581UL, 3187853500U
L, 186000900UL, 2189716659UL, 3425652083UL, 1980528062UL, 2384668855UL, 222
2812920UL, 3955466207UL, 2426547616UL, 3846752458UL, 3015538636UL, 23425933
65UL, 3613176865UL, 3484860981UL, 4278370194UL, 1979143878UL, 1159739458UL,
3714038404UL, 396530346UL, 3276617756UL, 3293940597UL, 4050183149UL, 14185
71985UL, 402563753UL, 2702853013UL, 2289900621UL, 2267058511UL, 3482161995U
L, 3375026019UL, 1988640267UL, 3674438074UL, 4124612310UL, 1057883705UL, 43
4730475UL, 3210959778UL, 4102029739UL, 2140938750UL, 3176753074UL, 23569715
12UL, 3969685288UL, 1556275580UL, 2648433428UL, 3959375381UL, 478841344UL,
1496991528UL, 3309714981UL, 569990368UL, 3660587501UL, 2550379574UL, 117751
9842UL, 2652707373UL, 543943404UL, 1912551128UL, 2278132032UL, 1484596780UL
, 3570913985UL, 2982401320UL, 1413776035UL, 3177275459UL, 3036211597UL, 109
1740466UL, 3448424311UL, 1445187645UL, 3205024875UL, 3135795254UL, 82373872
9UL, 3742134467UL, 4066657438UL, 1226311678UL, 2403605393UL, 537573634UL, 3
457409768UL, 1940233423UL, 1761431281UL, 1129427309UL, 2443661283UL, 320081
4257UL, 4094866249UL, 2666869754UL, 604785127UL, 2213464116UL, 3002782918UL
, 468024929UL, 2490681314UL, 3666681384UL, 1583346053UL, 3049668798UL, 3592
153237UL, 2573082448UL, 3082970021UL, 1461796708UL, 832526980UL, 3728763274
UL, 355291229UL, 4029588456UL, 832358279UL, 2125298737UL, 3681181038UL, 324
5535160UL, 1333342738UL, 1868897492UL, 446790068UL, 1278093154UL, 209011861
5UL, 4158925515UL, 4062165914UL, 822726809UL, 1154960183UL, 286518382UL, 11
70424276UL, 2554691236UL, 3674133415UL, 2765714969UL, 2330865375UL, 1908307
334UL, 3537287082UL, 410252600UL, 3977128218UL, 424210327UL, 2919071615UL,
2715518134UL, 64568844UL, 480972649UL, 2488797168UL, 1302817038UL, 22139952
65UL, 4229997295UL, 2200797852UL, 109368057UL, 3033807022UL, 1907400078UL,
645977948UL, 1410909090UL, 3700787906UL, 3375062371UL, 629087832UL, 1344281
719UL, 4249981139UL, 3457543297UL, 1218556849UL, 864222854UL, 1458445945UL,
914545469UL, 3451164212UL, 1088025757UL, 1129933985UL, 953788883UL, 240617
2924UL, 170364546UL, 3505490646UL, 1027553899UL, 2864067776UL, 436854871UL,
1342782209UL, 761167471UL, 2660173631UL, 4159507498UL, 4172028400UL, 24422
54644UL, 2110123720UL, 2315991253UL, 873066601UL, 1725470559UL, 3831299052U
L, 678672031UL, 1585431329UL, 3495750550UL, }, {1998393432UL, 2665389278UL,
3989307699UL, 3267631636UL, 3861682977UL, 3243522970UL, 1243992413UL, 2200
497260UL, 3821883021UL, 4187123083UL, 3451270040UL, 3044132745UL, 210128724
9UL, 2340839784UL, 227040990UL, 1724350416UL, 3228881240UL, 3123386528UL, 4
279362126UL, 3098224464UL, 2635534069UL, 3622906431UL, 206207480UL, 1894245
533UL, 2152374527UL, 1011223653UL, 7271757UL, 2972858087UL, 207942127UL, 33
55362797UL, 2593296740UL, 174093751UL, 3713822176UL, 4212355586UL, 33356052
24UL, 1171716408UL, 2867257989UL, 1522213957UL, 2016192462UL, 4229688395UL,
2174928148UL, 1468226225UL, 3938290338UL, 493240317UL, 3229423344UL, 25854
75729UL, 3112454413UL, 1881171707UL, 2555908056UL, 1997546352UL, 380428329U
L, 3341885423UL, 3307510279UL, 3519476676UL, 3613100811UL, 2555826262UL, 10
9341943UL, 2382715395UL, 3883409616UL, 1593551879UL, 2163678014UL, 33797831
37UL, 2810374300UL, 1516064864UL, 561144874UL, 316017838UL, 1899237567UL, 7
0857401UL, 3435185465UL, 4234661323UL, 2580352177UL, 32879620UL, 4171670150
UL, 1986234067UL, 3589478191UL, 2073132526UL, 2603712175UL, 377997975UL, 24
74419397UL, 3110698341UL, 812664089UL, 1778922726UL, 1686111212UL, 97278413
8UL, 3936486236UL, 2711468739UL, 423435866UL, 1661961159UL, 802312780UL, 18
68728136UL, 1760295704UL, 3357409828UL, 215039860UL, 683184627UL, 401911106
4UL, 3609261689UL, 2167554309UL, 1831085281UL, 3389357802UL, 4193421575UL,
628277197UL, 2900207619UL, 993609502UL, 3429627083UL, 2636466084UL, 3652352
199UL, 1780133580UL, 1670387713UL, 4086070210UL, 4004540729UL, 783029246UL,
2165667566UL, 1739001057UL, 377639972UL, 1102689625UL, 1945278055UL, 39411
85940UL, 3685368326UL, 1881761572UL, 2201338934UL, 801752UL, 2729497735UL,
492844690UL, 2998826141UL, 3844964457UL, 3679088359UL, 2196391660UL, 422226
9404UL, 357321611UL, 3727170055UL, 1819614072UL, 2348798457UL, 4294366646UL
, 1952884323UL, 3574345216UL, 2040734807UL, 232392443UL, 4183498179UL, 2614
866055UL, 112120292UL, 3624018350UL, 3340709877UL, 3097507723UL, 1268833488
UL, 3570501956UL, 3338260086UL, 293812421UL, 3683058169UL, 1147960351UL, 28
3731890UL, 2171233479UL, 1830154455UL, 4036602681UL, 1996981699UL, 13280383
4UL, 40256165UL, 2158110401UL, 3575159090UL, 3196553513UL, 3559872992UL, 34
02884675UL, 1998393432UL, 2665389278UL, 3989307699UL, 3267631636UL, 3617519
767UL, 3243522970UL, 1243992413UL, 2200497260UL, 3821883021UL, 3715729085UL
, 3451270040UL, 3044132745UL, 2101287249UL, 2340839784UL, 3173635549UL, 172
4350416UL, 3228881240UL, 3123386528UL, 4279362126UL, 2287520039UL, 26355340
69UL, 3622906431UL, 206207480UL, 1894245533UL, 96723416UL, 1011223653UL, 72
71757UL, 2972858087UL, 207942127UL, 1668335352UL, 2593296740UL, 174093751UL
, 3713822176UL, 4212355586UL, 49226793UL, 1171716408UL, 2867257989UL, 15222
13957UL, 2016192462UL, 118712412UL, 2174928148UL, 1468226225UL, 3938290338U
L, 493240317UL, 3788174304UL, 2585475729UL, 3112454413UL, 1881171707UL, 255
5908056UL, 3351139844UL, 380428329UL, 3341885423UL, 3307510279UL, 351947667
6UL, 1368994724UL, 2555826262UL, 109341943UL, 2382715395UL, 3883409616UL, 1
561509458UL, 2163678014UL, 3379783137UL, 2810374300UL, 1516064864UL, 231325
2274UL, 316017838UL, 1899237567UL, 70857401UL, 3435185465UL, 2585770746UL,
2580352177UL, 32879620UL, 4171670150UL, 1986234067UL, 3317983509UL, 2073132
526UL, 2603712175UL, 377997975UL, 2474419397UL, 908728599UL, 812664089UL, 1
778922726UL, 1686111212UL, 972784138UL, 1992540005UL, 2711468739UL, 4234358
66UL, 1661961159UL, 802312780UL, 907108769UL, 1760295704UL, 3357409828UL, 2
15039860UL, 683184627UL, 2806826652UL, 3609261689UL, 2167554309UL, 18310852
81UL, 3389357802UL, 2755692689UL, 628277197UL, 2900207619UL, 993609502UL, 3
429627083UL, 3605915742UL, 3652352199UL, 1780133580UL, 1670387713UL, 408607
0210UL, 3717326627UL, 783029246UL, 2165667566UL, 1739001057UL, 377639972UL,
2355216626UL, 1945278055UL, 3941185940UL, 3685368326UL, 1881761572UL, 4024
097818UL, 801752UL, 2729497735UL, 492844690UL, 2998826141UL, 2719601647UL,
3679088359UL, 2196391660UL, 4222269404UL, 357321611UL, 1319821972UL, 181961
4072UL, 2348798457UL, 4294366646UL, 1952884323UL, 3573866689UL, 2040734807U
L, 232392443UL, 4183498179UL, 2614866055UL, 440744432UL, 3624018350UL, 3340
709877UL, 3097507723UL, 1268833488UL, 224895395UL, 3338260086UL, 293812421U
L, 3683058169UL, 1147960351UL, 3433425235UL, 2171233479UL, 1830154455UL, 40
36602681UL, 1996981699UL, 2875889721UL, 40256165UL, 2158110401UL, 357515909
0UL, 3196553513UL, 1094082574UL, 3402884675UL, 1998393432UL, 2665389278UL,
3989307699UL, 4068940467UL, 3617519767UL, 3243522970UL, 1243992413UL, 22004
97260UL, 441678457UL, 3715729085UL, 3451270040UL, 3044132745UL, 2101287249U
L, 2181502237UL, 3173635549UL, 1724350416UL, 3228881240UL, 3123386528UL, 19
68352124UL, 2287520039UL, 2635534069UL, 3622906431UL, 206207480UL, 20650935
99UL, 96723416UL, 1011223653UL, 7271757UL, 2972858087UL, 1094044749UL, 1668
335352UL, 2593296740UL, 174093751UL, 3713822176UL, 2887397643UL, 49226793UL
, 1171716408UL, 2867257989UL, 1522213957UL, 984348433UL, 118712412UL, 21749
28148UL, 1468226225UL, 3938290338UL, 2279430036UL, 3788174304UL, 2585475729
UL, 3112454413UL, 1881171707UL, 4247636500UL, 3351139844UL, 380428329UL, 33
41885423UL, 3307510279UL, 2887754196UL, 1368994724UL, 2555826262UL, 1093419
43UL, 2382715395UL, 2836761616UL, 1561509458UL, 2163678014UL, 3379783137UL,
2810374300UL, 1635278016UL, 2313252274UL, 316017838UL, 1899237567UL, 70857
401UL, 3481535811UL, 2585770746UL, 2580352177UL, 32879620UL, 4171670150UL,
2248003250UL, 3317983509UL, 2073132526UL, 2603712175UL, 377997975UL, 328616
2818UL, 908728599UL, 812664089UL, 1778922726UL, 1686111212UL, 4024815755UL,
1992540005UL, 2711468739UL, 423435866UL, 1661961159UL, 2257259057UL, 90710
8769UL, 1760295704UL, 3357409828UL, 215039860UL, 3917391198UL, 2806826652UL
, 3609261689UL, 2167554309UL, 1831085281UL, 4238043113UL, 2755692689UL, 628
277197UL, 2900207619UL, 993609502UL, 2036092353UL, 3605915742UL, 3652352199
UL, 1780133580UL, 1670387713UL, 118446953UL, 3717326627UL, 783029246UL, 216
5667566UL, 1739001057UL, 203160626UL, 2355216626UL, 1945278055UL, 394118594
0UL, 3685368326UL, 546361979UL, 4024097818UL, 801752UL, 2729497735UL, 49284
4690UL, 1023017124UL, 2719601647UL, 3679088359UL, 2196391660UL, 4222269404U
L, 621859651UL, 1319821972UL, 1819614072UL, 2348798457UL, 4294366646UL, 111
4888560UL, 3573866689UL, 2040734807UL, 232392443UL, 4183498179UL, 395950460
9UL, 440744432UL, 3624018350UL, 3340709877UL, 3097507723UL, 3613295037UL, 2
24895395UL, 3338260086UL, 293812421UL, 3683058169UL, 1655305863UL, 34334252
35UL, 2171233479UL, 1830154455UL, 4036602681UL, 3731384097UL, 2875889721UL,
40256165UL, 2158110401UL, 3575159090UL, 1847744924UL, 1094082574UL, 340288
4675UL, 1998393432UL, 2665389278UL, 3781866777UL, 4068940467UL, 3617519767U
L, 3243522970UL, 1243992413UL, 2723708256UL, 441678457UL, 3715729085UL, 345
1270040UL, 3044132745UL, 4013832842UL, 2181502237UL, 3173635549UL, 17243504
16UL, 3228881240UL, 2092292494UL, 1968352124UL, 2287520039UL, 2635534069UL,
3622906431UL, 3186333458UL, 2065093599UL, 96723416UL, 1011223653UL, 727175
7UL, 649658033UL, 1094044749UL, 1668335352UL, 2593296740UL, 174093751UL, 41
59420309UL, 2887397643UL, 49226793UL, 1171716408UL, 2867257989UL, 259007795
3UL, 984348433UL, 118712412UL, 2174928148UL, 1468226225UL, 1065322711UL, 22
79430036UL, 3788174304UL, 2585475729UL, 3112454413UL, 3932517386UL, 4247636
500UL, 3351139844UL, 380428329UL, 3341885423UL, 1285273904UL, 2887754196UL,
1368994724UL, 2555826262UL, 109341943UL, 2318470582UL, 2836761616UL, 15615
09458UL, 2163678014UL, 3379783137UL, 674658583UL, 1635278016UL, 2313252274U
L, 316017838UL, 1899237567UL, 2192372173UL, 3481535811UL, 2585770746UL, 258
0352177UL, 32879620UL, 300323274UL, 2248003250UL, 3317983509UL, 2073132526U
L, 2603712175UL, 3086543917UL, 3286162818UL, 908728599UL, 812664089UL, 1778
922726UL, 2263290659UL, 4024815755UL, 1992540005UL, 2711468739UL, 423435866
UL, 819027349UL, 2257259057UL, 907108769UL, 1760295704UL, 3357409828UL, 114
2221093UL, 3917391198UL, 2806826652UL, 3609261689UL, 2167554309UL, 41081558
75UL, 4238043113UL, 2755692689UL, 628277197UL, 2900207619UL, 3041719497UL,
2036092353UL, 3605915742UL, 3652352199UL, 1780133580UL, 2397410862UL, 11844
6953UL, 3717326627UL, 783029246UL, 2165667566UL, 2721690354UL, 203160626UL,
2355216626UL, 1945278055UL, 3941185940UL, 2768842108UL, 546361979UL, 40240
97818UL, 801752UL, 2729497735UL, 4045063232UL, 1023017124UL, 2719601647UL,
3679088359UL, 2196391660UL, 2666107451UL, 621859651UL, 1319821972UL, 181961
4072UL, 2348798457UL, 3555102623UL, 1114888560UL, 3573866689UL, 2040734807U
L, 232392443UL, 3359040541UL, 3959504609UL, 440744432UL, 3624018350UL, 3340
709877UL, 1477919696UL, 3613295037UL, 224895395UL, 3338260086UL, 293812421U
L, 4210187101UL, 1655305863UL, 3433425235UL, 2171233479UL, 1830154455UL, 41
50241150UL, 3731384097UL, 2875889721UL, 40256165UL, 2158110401UL, 335024668
7UL, 455561037UL, 2250400255UL, 3192153445UL, 3258870230UL, 1500391873UL, 4
142878334UL, 1155955691UL, 1483275844UL, 4189436981UL, 323745948UL, 1976017
426UL, 2804626790UL, 2717553615UL, 2315409034UL, 954508235UL, 3845175920UL,
3999878682UL, 1247696432UL, 1743319509UL, 2998248398UL, 3694350012UL, 4072
006361UL, 191306987UL, 2816321878UL, 1324077734UL, 1083060006UL, 3406855480
UL, 1619622379UL, 2160350UL, 3302238190UL, 3368021261UL, 3685228564UL, 3863
934685UL, 771728612UL, 854205233UL, 2304696695UL, 421449207UL, 1265752117UL
, 3852292419UL, 305345788UL, 1540622105UL, 1904883477UL, 833469256UL, 13440
6680UL, 3012455058UL, 4035477953UL, 2925192459UL, 1559200592UL, 3851612860U
L, 718484562UL, 1377960276UL, 1586892849UL, 1361298269UL, 3417917896UL, 128
1324499UL, 1012538763UL, 1350578667UL, 3946475598UL, 2982283954UL, 35487928
04UL, 284542749UL, 1194648577UL, 3087899716UL, 3966595444UL, 2088330116UL,
3641652062UL, 327128507UL, 593906557UL, 1092448919UL, 2459189516UL, 4053392
241UL, 3356198248UL, 2352376508UL, 470648997UL, 1017041256UL, 3234172340UL,
3928191489UL, 3266226858UL, 4219289150UL, 1229098319UL, 4275351308UL, 2720
777751UL, 3566728718UL, 638322822UL, 2369792461UL, 2869492261UL, 3120083828
UL, 1890399556UL, 3309991008UL, 3785452464UL, 4128660314UL, 3726791982UL, 1
67177896UL, 461294981UL, 3988638998UL, 2937794823UL, 3981029822UL, 11116814
02UL, 2015965721UL, 7261806UL, 2669786265UL, 1083582734UL, 3270228881UL, 38
92235938UL, 2695872715UL, 4246051290UL, 3214293333UL, 343604199UL, 32156048
88UL, 661024127UL, 2931754053UL, 3787840039UL, 2053363765UL, 363432336UL, 1
12334132UL, 2871797223UL, 138911320UL, 3981126938UL, 2027332192UL, 18047306
44UL, 590150270UL, 641538574UL, 6802174UL, 3551446076UL, 3908480472UL, 1004
531022UL, 2097228524UL, 1919074232UL, 154482247UL, 121437972UL, 1215661323U
L, 1178068273UL, 1097220699UL, 2823681422UL, 262636065UL, 2943371149UL, 176
8780720UL, 3866040605UL, 1855991583UL, 3988248086UL, 629223947UL, 338061233
0UL, 3552916762UL, 197596340UL, 573801686UL, 2049230598UL, 2910471867UL, 26
86314264UL, 1726228846UL, 3516983332UL, 726840185UL, 1241204222UL, 22375743
17UL, 70568042UL, 1932610099UL, 2221862221UL, 1510378092UL, 4050391637UL, 4
077539568UL, }, {3872117793UL, 803220151UL, 70843412UL, 1661103032UL, 19768
11457UL, 2186373604UL, 564259972UL, 1475436923UL, 2260980893UL, 4245534505U
L, 1075107552UL, 3692990573UL, 370098873UL, 4045905424UL, 2420395420UL, 233
2395402UL, 207483321UL, 622317750UL, 3004242500UL, 833623111UL, 3151161301U
L, 1629139881UL, 352228793UL, 2439953368UL, 3183333619UL, 2703537080UL, 321
8957129UL, 3164695888UL, 1741641842UL, 963394141UL, 4241612717UL, 103447678
4UL, 2035880432UL, 3977821313UL, 1543311495UL, 3010014356UL, 1638490901UL,
2364265378UL, 3420329129UL, 333361555UL, 1133565821UL, 1450937015UL, 616059
115UL, 3216393887UL, 3041978455UL, 3990855695UL, 1238628750UL, 512746184UL,
3256670217UL, 1616316512UL, 2791405051UL, 93474487UL, 2865892488UL, 190147
1398UL, 2930857966UL, 2178431077UL, 2325598341UL, 3189256113UL, 1302432091U
L, 808592927UL, 2945846737UL, 3487931071UL, 2018175258UL, 752981057UL, 1097
082589UL, 1307115286UL, 175147508UL, 3611190164UL, 850238914UL, 3318706185U
L, 199743319UL, 328621708UL, 3183670050UL, 3609998315UL, 4075306371UL, 3554
549067UL, 2119566187UL, 1498503842UL, 1261870696UL, 2216745780UL, 950288337
UL, 1117344941UL, 2150569143UL, 2899286760UL, 1594966374UL, 888858617UL, 35
840654UL, 2829539211UL, 2511395669UL, 3607190544UL, 3278412778UL, 224989590
7UL, 1320858068UL, 3576889788UL, 266766189UL, 1522426851UL, 1903494122UL, 1
928370573UL, 2628132591UL, 3322025904UL, 220280169UL, 433606853UL, 14289614
79UL, 986074592UL, 2128892987UL, 467697583UL, 1616913929UL, 325674890UL, 44
4442578UL, 649166208UL, 1689709565UL, 1493452467UL, 2222122038UL, 121114616
UL, 2134348225UL, 3512035688UL, 1283058921UL, 4230441398UL, 3701238559UL, 3
37534132UL, 1418548715UL, 1190006478UL, 500654385UL, 1766924757UL, 19446807
46UL, 940574010UL, 922744002UL, 186142284UL, 3131162902UL, 1693891092UL, 30
31823448UL, 2143051534UL, 1429025284UL, 1487843160UL, 3606456133UL, 2079235
652UL, 2447285474UL, 2669283767UL, 3232117829UL, 2490054343UL, 3225501736UL
, 2911340385UL, 382319031UL, 1516937595UL, 622543191UL, 1388990570UL, 17491
79860UL, 1924483707UL, 2593474505UL, 472539197UL, 122872799UL, 2586347240UL
, 880588515UL, 4046335279UL, 1712182607UL, 4270737941UL, 1336703451UL, 3390
078162UL, 382216945UL, 3733326081UL, 460422073UL, 3872117793UL, 803220151UL
, 70843412UL, 1661103032UL, 250339760UL, 2186373604UL, 564259972UL, 1475436
923UL, 2260980893UL, 657986735UL, 1075107552UL, 3692990573UL, 370098873UL,
4045905424UL, 3201950123UL, 2332395402UL, 207483321UL, 622317750UL, 3004242
500UL, 3732213278UL, 3151161301UL, 1629139881UL, 352228793UL, 2439953368UL,
3572618926UL, 2703537080UL, 3218957129UL, 3164695888UL, 1741641842UL, 6859
33373UL, 4241612717UL, 1034476784UL, 2035880432UL, 3977821313UL, 3855995181
UL, 3010014356UL, 1638490901UL, 2364265378UL, 3420329129UL, 2355603679UL, 1
133565821UL, 1450937015UL, 616059115UL, 3216393887UL, 1733804102UL, 3990855
695UL, 1238628750UL, 512746184UL, 3256670217UL, 2651059231UL, 2791405051UL,
93474487UL, 2865892488UL, 1901471398UL, 2113461797UL, 2178431077UL, 232559
8341UL, 3189256113UL, 1302432091UL, 2986990416UL, 2945846737UL, 3487931071U
L, 2018175258UL, 752981057UL, 2428033310UL, 1307115286UL, 175147508UL, 3611
190164UL, 850238914UL, 1033628405UL, 199743319UL, 328621708UL, 3183670050UL
, 3609998315UL, 4024297327UL, 3554549067UL, 2119566187UL, 1498503842UL, 126
1870696UL, 290361143UL, 950288337UL, 1117344941UL, 2150569143UL, 2899286760
UL, 168826051UL, 888858617UL, 35840654UL, 2829539211UL, 2511395669UL, 28908
82060UL, 3278412778UL, 2249895907UL, 1320858068UL, 3576889788UL, 1794920145
UL, 1522426851UL, 1903494122UL, 1928370573UL, 2628132591UL, 1251697758UL, 2
20280169UL, 433606853UL, 1428961479UL, 986074592UL, 2707115661UL, 467697583
UL, 1616913929UL, 325674890UL, 444442578UL, 122781510UL, 1689709565UL, 1493
452467UL, 2222122038UL, 121114616UL, 3425723636UL, 3512035688UL, 1283058921
UL, 4230441398UL, 3701238559UL, 1646155473UL, 1418548715UL, 1190006478UL, 5
00654385UL, 1766924757UL, 3920475367UL, 940574010UL, 922744002UL, 186142284
UL, 3131162902UL, 54639113UL, 3031823448UL, 2143051534UL, 1429025284UL, 148
7843160UL, 4152687885UL, 2079235652UL, 2447285474UL, 2669283767UL, 32321178
29UL, 1601035152UL, 3225501736UL, 2911340385UL, 382319031UL, 1516937595UL,
3508441679UL, 1388990570UL, 1749179860UL, 1924483707UL, 2593474505UL, 28354
03456UL, 122872799UL, 2586347240UL, 880588515UL, 4046335279UL, 2958058367UL
, 4270737941UL, 1336703451UL, 3390078162UL, 382216945UL, 450517882UL, 46042
2073UL, 3872117793UL, 803220151UL, 70843412UL, 2066343874UL, 250339760UL, 2
186373604UL, 564259972UL, 1475436923UL, 1683787449UL, 657986735UL, 10751075
52UL, 3692990573UL, 370098873UL, 2615082840UL, 3201950123UL, 2332395402UL,
207483321UL, 622317750UL, 2655424371UL, 3732213278UL, 3151161301UL, 1629139
881UL, 352228793UL, 3236724760UL, 3572618926UL, 2703537080UL, 3218957129UL,
3164695888UL, 9775065UL, 685933373UL, 4241612717UL, 1034476784UL, 20358804
32UL, 1621920075UL, 3855995181UL, 3010014356UL, 1638490901UL, 2364265378UL,
1509475888UL, 2355603679UL, 1133565821UL, 1450937015UL, 616059115UL, 36661
88236UL, 1733804102UL, 3990855695UL, 1238628750UL, 512746184UL, 3900473826U
L, 2651059231UL, 2791405051UL, 93474487UL, 2865892488UL, 222759186UL, 21134
61797UL, 2178431077UL, 2325598341UL, 3189256113UL, 2505499508UL, 2986990416
UL, 2945846737UL, 3487931071UL, 2018175258UL, 2766733928UL, 2428033310UL, 1
307115286UL, 175147508UL, 3611190164UL, 1909211603UL, 1033628405UL, 1997433
19UL, 328621708UL, 3183670050UL, 1680331218UL, 4024297327UL, 3554549067UL,
2119566187UL, 1498503842UL, 3516256046UL, 290361143UL, 950288337UL, 1117344
941UL, 2150569143UL, 3182619063UL, 168826051UL, 888858617UL, 35840654UL, 28
29539211UL, 645798943UL, 2890882060UL, 3278412778UL, 2249895907UL, 13208580
68UL, 1436708568UL, 1794920145UL, 1522426851UL, 1903494122UL, 1928370573UL,
3693049252UL, 1251697758UL, 220280169UL, 433606853UL, 1428961479UL, 372441
5861UL, 2707115661UL, 467697583UL, 1616913929UL, 325674890UL, 1448052253UL,
122781510UL, 1689709565UL, 1493452467UL, 2222122038UL, 2177448198UL, 34257
23636UL, 3512035688UL, 1283058921UL, 4230441398UL, 3050940272UL, 1646155473
UL, 1418548715UL, 1190006478UL, 500654385UL, 1106232UL, 3920475367UL, 94057
4010UL, 922744002UL, 186142284UL, 4144806511UL, 54639113UL, 3031823448UL, 2
143051534UL, 1429025284UL, 2067453848UL, 4152687885UL, 2079235652UL, 244728
5474UL, 2669283767UL, 428527087UL, 1601035152UL, 3225501736UL, 2911340385UL
, 382319031UL, 2565464472UL, 3508441679UL, 1388990570UL, 1749179860UL, 1924
483707UL, 1737735237UL, 2835403456UL, 122872799UL, 2586347240UL, 880588515U
L, 597822462UL, 2958058367UL, 4270737941UL, 1336703451UL, 3390078162UL, 253
2634475UL, 450517882UL, 460422073UL, 3872117793UL, 803220151UL, 801648827UL
, 2066343874UL, 250339760UL, 2186373604UL, 564259972UL, 3417948976UL, 16837
87449UL, 657986735UL, 1075107552UL, 3692990573UL, 2235306692UL, 2615082840U
L, 3201950123UL, 2332395402UL, 207483321UL, 699310933UL, 2655424371UL, 3732
213278UL, 3151161301UL, 1629139881UL, 1152704006UL, 3236724760UL, 357261892
6UL, 2703537080UL, 3218957129UL, 2726926336UL, 9775065UL, 685933373UL, 4241
612717UL, 1034476784UL, 2398119652UL, 1621920075UL, 3855995181UL, 301001435
6UL, 1638490901UL, 252854480UL, 1509475888UL, 2355603679UL, 1133565821UL, 1
450937015UL, 2655911639UL, 3666188236UL, 1733804102UL, 3990855695UL, 123862
8750UL, 1115900497UL, 3900473826UL, 2651059231UL, 2791405051UL, 93474487UL,
1862985957UL, 222759186UL, 2113461797UL, 2178431077UL, 2325598341UL, 41790
75132UL, 2505499508UL, 2986990416UL, 2945846737UL, 3487931071UL, 564667776U
L, 2766733928UL, 2428033310UL, 1307115286UL, 175147508UL, 1759077815UL, 190
9211603UL, 1033628405UL, 199743319UL, 328621708UL, 2552816198UL, 1680331218
UL, 4024297327UL, 3554549067UL, 2119566187UL, 2267805778UL, 3516256046UL, 2
90361143UL, 950288337UL, 1117344941UL, 2897506172UL, 3182619063UL, 16882605
1UL, 888858617UL, 35840654UL, 2035476068UL, 645798943UL, 2890882060UL, 3278
412778UL, 2249895907UL, 3278449102UL, 1436708568UL, 1794920145UL, 152242685
1UL, 1903494122UL, 1500763736UL, 3693049252UL, 1251697758UL, 220280169UL, 4
33606853UL, 3914497854UL, 3724415861UL, 2707115661UL, 467697583UL, 16169139
29UL, 918435305UL, 1448052253UL, 122781510UL, 1689709565UL, 1493452467UL, 6
09575172UL, 2177448198UL, 3425723636UL, 3512035688UL, 1283058921UL, 3661181
550UL, 3050940272UL, 1646155473UL, 1418548715UL, 1190006478UL, 1047301661UL
, 1106232UL, 3920475367UL, 940574010UL, 922744002UL, 2510633517UL, 41448065
11UL, 54639113UL, 3031823448UL, 2143051534UL, 3242814908UL, 2067453848UL, 4
152687885UL, 2079235652UL, 2447285474UL, 736638210UL, 428527087UL, 16010351
52UL, 3225501736UL, 2911340385UL, 1849570436UL, 2565464472UL, 3508441679UL,
1388990570UL, 1749179860UL, 84517579UL, 1737735237UL, 2835403456UL, 122872
799UL, 2586347240UL, 4002124614UL, 597822462UL, 2958058367UL, 4270737941UL,
1336703451UL, 3078170472UL, 1186434751UL, 700631413UL, 1497890797UL, 11953
47450UL, 2560167391UL, 1116697259UL, 1254138573UL, 747913260UL, 240954704UL
, 3107512667UL, 360584144UL, 3422778960UL, 3516528389UL, 3301260366UL, 1254
513537UL, 122269053UL, 1579582456UL, 873334104UL, 3918835024UL, 1731872444U
L, 1974410416UL, 1811172641UL, 4172523062UL, 4092675777UL, 4124987343UL, 19
36078756UL, 1757348689UL, 2694415512UL, 128641660UL, 1744777659UL, 31731167
29UL, 983733754UL, 1430789547UL, 701906842UL, 3367232568UL, 3266433501UL, 3
572590347UL, 1453272962UL, 2106553114UL, 993786201UL, 2149441250UL, 1295181
065UL, 2962229026UL, 3709052556UL, 3255608941UL, 3677730029UL, 483873127UL,
102227292UL, 2626265293UL, 2018984578UL, 2266388762UL, 1191709548UL, 21527
25916UL, 583672623UL, 2230473473UL, 1995194269UL, 1740347812UL, 2558095372U
L, 3070195183UL, 3023333227UL, 2497183195UL, 1908755188UL, 773027539UL, 364
6876518UL, 2272586839UL, 493318726UL, 2107067517UL, 2000805278UL, 253082963
6UL, 3183628745UL, 677565332UL, 1497629423UL, 82094920UL, 2214054433UL, 263
5367545UL, 470855467UL, 2184853389UL, 2942188934UL, 188335670UL, 3656661644
UL, 1883526235UL, 3990873975UL, 1490784356UL, 4047548172UL, 3149642641UL, 3
289988179UL, 2590918909UL, 2893039564UL, 2350687346UL, 4252624874UL, 153724
56UL, 1614496594UL, 2364847678UL, 2604511825UL, 422365460UL, 4195174772UL,
3266964836UL, 2008671995UL, 54038434UL, 781948549UL, 1276017666UL, 27563766
12UL, 2436825273UL, 1711863836UL, 3541493950UL, 3821378841UL, 1007557618UL,
345375815UL, 2081905201UL, 2227278118UL, 1185927141UL, 1082173792UL, 35673
61925UL, 1940465859UL, 541632942UL, 1830210248UL, 3757851982UL, 775883450UL
, 1666577465UL, 1004944607UL, 878440834UL, 2146344131UL, 4195798476UL, 3701
64841UL, 3649112729UL, 37066142UL, 2311278904UL, 1935745497UL, 2304799402UL
, 4107299626UL, 1348526232UL, 2473609635UL, 3284032699UL, 2374292786UL, 176
2329186UL, 857978496UL, 1039346432UL, 2621413355UL, 29961014UL, 3582263091U
L, 4268542513UL, 3890612190UL, 3096173646UL, 2026544230UL, 3856142618UL, 23
47115934UL, 319800326UL, 3255916105UL, 2430273059UL, 823505311UL, 874255188
UL, 1401925393UL, 4203707857UL, 4259159566UL, 2606881118UL, 1978288664UL, 1
447576038UL, 3860341401UL, 412510348UL, }, {4052471963UL, 683640040UL, 3043
876021UL, 3466644483UL, 4222418025UL, 3035140128UL, 1466027937UL, 18198088U
L, 3410320851UL, 3040963721UL, 488404231UL, 3157371815UL, 769336092UL, 3240
417718UL, 808582581UL, 2075839263UL, 835026995UL, 3123726486UL, 3284240985U
L, 1898453053UL, 3606056482UL, 512836002UL, 2715428547UL, 4182302879UL, 164
4882480UL, 3160187826UL, 390292489UL, 980889545UL, 2776206633UL, 2482799995
UL, 617042280UL, 3501667414UL, 689451808UL, 497018701UL, 238525753UL, 38901
63301UL, 896679896UL, 1544533015UL, 3412477225UL, 3116575138UL, 4250402651U
L, 3990990746UL, 819056741UL, 1459334146UL, 158377590UL, 3444755752UL, 8230
450UL, 1378706455UL, 684191332UL, 3217423797UL, 2842520097UL, 1631477948UL,
2591254230UL, 959644473UL, 1020694107UL, 1748401915UL, 3452514983UL, 38927
66171UL, 1227786994UL, 2086180800UL, 2394613217UL, 2091953150UL, 870094953U
L, 2306851481UL, 571550601UL, 488878212UL, 873197214UL, 2630100528UL, 20674
76907UL, 2162307009UL, 2026119728UL, 115875280UL, 2905867426UL, 248774881UL
, 3110900450UL, 2236032812UL, 1888510348UL, 708001855UL, 996960491UL, 35141
96956UL, 1407967546UL, 1826568876UL, 3659618284UL, 2614104317UL, 2230066308
UL, 1055135881UL, 2537437343UL, 1858044413UL, 2608594891UL, 2750681169UL, 3
241939420UL, 3966440877UL, 2375002886UL, 2417753441UL, 1405878685UL, 108113
3199UL, 1496940727UL, 382467042UL, 2745477587UL, 1209424459UL, 811187075UL,
1385604734UL, 2623887355UL, 3443875720UL, 394141555UL, 4142998949UL, 41954
14618UL, 1489846841UL, 2253433808UL, 1171450286UL, 84131191UL, 4387588UL, 2
641405140UL, 3525405389UL, 3273000909UL, 423660319UL, 2366546732UL, 3698878
607UL, 2161119729UL, 4263629085UL, 3029102089UL, 2692507376UL, 3266869596UL
, 1658012061UL, 1960169440UL, 1002311379UL, 3724446882UL, 2004188516UL, 999
513506UL, 2200093802UL, 4141037460UL, 351865836UL, 412875013UL, 1535823315U
L, 3880657632UL, 3109944987UL, 3207577548UL, 3462087941UL, 584875517UL, 263
5241084UL, 3834145971UL, 1693380373UL, 3524443732UL, 934775214UL, 196058884
7UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000UL, 226430296UL, 665
553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 3925248326UL, 285534637
6UL, 1205558328UL, 808835317UL, 3295908896UL, 4170076136UL, 2438272365UL, 4
052471963UL, 683640040UL, 3043876021UL, 3466644483UL, 1385549869UL, 3035140
128UL, 1466027937UL, 18198088UL, 3410320851UL, 2171386836UL, 488404231UL, 3
157371815UL, 769336092UL, 3240417718UL, 2921774554UL, 2075839263UL, 8350269
95UL, 3123726486UL, 3284240985UL, 72352110UL, 3606056482UL, 512836002UL, 27
15428547UL, 4182302879UL, 3869483469UL, 3160187826UL, 390292489UL, 98088954
5UL, 2776206633UL, 1385691983UL, 617042280UL, 3501667414UL, 689451808UL, 49
7018701UL, 2600411809UL, 3890163301UL, 896679896UL, 1544533015UL, 341247722
5UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL, 1459334146UL, 19
9003993UL, 3444755752UL, 8230450UL, 1378706455UL, 684191332UL, 1750733272UL
, 2842520097UL, 1631477948UL, 2591254230UL, 959644473UL, 2113375576UL, 1748
401915UL, 3452514983UL, 3892766171UL, 1227786994UL, 275473920UL, 2394613217
UL, 2091953150UL, 870094953UL, 2306851481UL, 897057645UL, 488878212UL, 8731
97214UL, 2630100528UL, 2067476907UL, 944114068UL, 2026119728UL, 115875280UL
, 2905867426UL, 248774881UL, 989201307UL, 2236032812UL, 1888510348UL, 70800
1855UL, 996960491UL, 2121706374UL, 1407967546UL, 1826568876UL, 3659618284UL
, 2614104317UL, 2931815032UL, 1055135881UL, 2537437343UL, 1858044413UL, 260
8594891UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002886UL, 24177534
41UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL, 2745477587UL,
81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 3443875720UL, 21006298
79UL, 4142998949UL, 4195414618UL, 1489846841UL, 2253433808UL, 337182869UL,
84131191UL, 4387588UL, 2641405140UL, 3525405389UL, 661876463UL, 423660319UL
, 2366546732UL, 3698878607UL, 2161119729UL, 309510684UL, 3029102089UL, 2692
507376UL, 3266869596UL, 1658012061UL, 11119541UL, 1002311379UL, 3724446882U
L, 2004188516UL, 999513506UL, 3486722046UL, 4141037460UL, 351865836UL, 4128
75013UL, 1535823315UL, 2818130700UL, 3109944987UL, 3207577548UL, 3462087941
UL, 584875517UL, 322875622UL, 3834145971UL, 1693380373UL, 3524443732UL, 934
775214UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000U
L, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 553
869152UL, 2855346376UL, 1205558328UL, 808835317UL, 3295908896UL, 470585896U
L, 2438272365UL, 4052471963UL, 683640040UL, 3043876021UL, 1588419572UL, 138
5549869UL, 3035140128UL, 1466027937UL, 18198088UL, 363815288UL, 2171386836U
L, 488404231UL, 3157371815UL, 769336092UL, 2464768302UL, 2921774554UL, 2075
839263UL, 835026995UL, 3123726486UL, 4229246330UL, 72352110UL, 3606056482UL
, 512836002UL, 2715428547UL, 319830805UL, 3869483469UL, 3160187826UL, 39029
2489UL, 980889545UL, 2966401462UL, 1385691983UL, 617042280UL, 3501667414UL,
689451808UL, 4047377762UL, 2600411809UL, 3890163301UL, 896679896UL, 154453
3015UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL,
965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1378706455UL, 51902971UL
, 1750733272UL, 2842520097UL, 1631477948UL, 2591254230UL, 426039404UL, 2113
375576UL, 1748401915UL, 3452514983UL, 3892766171UL, 2833368447UL, 275473920
UL, 2394613217UL, 2091953150UL, 870094953UL, 3524323828UL, 897057645UL, 488
878212UL, 873197214UL, 2630100528UL, 3939852929UL, 944114068UL, 2026119728U
L, 115875280UL, 2905867426UL, 3192643919UL, 989201307UL, 2236032812UL, 1888
510348UL, 708001855UL, 2166012172UL, 2121706374UL, 1407967546UL, 1826568876
UL, 3659618284UL, 135277096UL, 2931815032UL, 1055135881UL, 2537437343UL, 18
58044413UL, 2588429924UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002
886UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL,
1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 4070531
513UL, 2100629879UL, 4142998949UL, 4195414618UL, 1489846841UL, 2688068550UL
, 337182869UL, 84131191UL, 4387588UL, 2641405140UL, 1837403234UL, 661876463
UL, 423660319UL, 2366546732UL, 3698878607UL, 2916121190UL, 309510684UL, 302
9102089UL, 2692507376UL, 3266869596UL, 303422295UL, 11119541UL, 1002311379U
L, 3724446882UL, 2004188516UL, 2652711421UL, 3486722046UL, 4141037460UL, 35
1865836UL, 412875013UL, 113149471UL, 2818130700UL, 3109944987UL, 3207577548
UL, 3462087941UL, 1443140792UL, 322875622UL, 3834145971UL, 1693380373UL, 35
24443732UL, 901891935UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016
UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 11
14492412UL, 553869152UL, 2855346376UL, 1205558328UL, 808835317UL, 326662629
4UL, 470585896UL, 2438272365UL, 4052471963UL, 683640040UL, 3581539398UL, 15
88419572UL, 1385549869UL, 3035140128UL, 1466027937UL, 4075470388UL, 3638152
88UL, 2171386836UL, 488404231UL, 3157371815UL, 2759472233UL, 2464768302UL,
2921774554UL, 2075839263UL, 835026995UL, 1030654310UL, 4229246330UL, 723521
10UL, 3606056482UL, 512836002UL, 961858496UL, 319830805UL, 3869483469UL, 31
60187826UL, 390292489UL, 2366221117UL, 2966401462UL, 1385691983UL, 61704228
0UL, 3501667414UL, 295865937UL, 4047377762UL, 2600411809UL, 3890163301UL, 8
96679896UL, 21714884UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746U
L, 1012967081UL, 965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1255302
023UL, 51902971UL, 1750733272UL, 2842520097UL, 1631477948UL, 2321320272UL,
426039404UL, 2113375576UL, 1748401915UL, 3452514983UL, 2847013518UL, 283336
8447UL, 275473920UL, 2394613217UL, 2091953150UL, 1250695522UL, 3524323828UL
, 897057645UL, 488878212UL, 873197214UL, 1452317325UL, 3939852929UL, 944114
068UL, 2026119728UL, 115875280UL, 4061820350UL, 3192643919UL, 989201307UL,
2236032812UL, 1888510348UL, 3986446165UL, 2166012172UL, 2121706374UL, 14079
67546UL, 1826568876UL, 2910745432UL, 135277096UL, 2931815032UL, 1055135881U
L, 2537437343UL, 2976455307UL, 2588429924UL, 1423973935UL, 3241939420UL, 39
66440877UL, 2418897705UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940
727UL, 1321648771UL, 1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 1
7644628UL, 4070531513UL, 2100629879UL, 4142998949UL, 4195414618UL, 26973105
27UL, 2688068550UL, 337182869UL, 84131191UL, 4387588UL, 1724191700UL, 18374
03234UL, 661876463UL, 423660319UL, 2366546732UL, 693430992UL, 2916121190UL,
309510684UL, 3029102089UL, 2692507376UL, 3917396098UL, 303422295UL, 111195
41UL, 1002311379UL, 3724446882UL, 841468294UL, 2652711421UL, 3486722046UL,
4141037460UL, 351865836UL, 1733384185UL, 113149471UL, 2818130700UL, 3109944
987UL, 3207577548UL, 2326233100UL, 1443140792UL, 322875622UL, 3834145971UL,
1693380373UL, 1580706359UL, 901891935UL, 3879414752UL, 2226778032UL, 10446
09478UL, 3805470822UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348U
L, 3406548636UL, 1114492412UL, 553869152UL, 2855346376UL, 1205558328UL, 428
7831475UL, 1329654114UL, 2347235746UL, 2477803138UL, 2962371859UL, 36100242
83UL, 4197266903UL, 1162294689UL, 1746713323UL, 2815058477UL, 2152552186UL,
4214791071UL, 2382522482UL, 3713914466UL, 3974765132UL, 348354997UL, 16702
76150UL, 2173074887UL, 381736894UL, 3866219357UL, 1919366695UL, 3635118824U
L, 2298653261UL, 3534332682UL, 1627699897UL, 4168636618UL, 3787938690UL, 21
44231271UL, 2067679462UL, 217001062UL, 2308928337UL, 1620415125UL, 35265591
72UL, 749451561UL, 2456947371UL, 3543607786UL, 1893824735UL, 962598819UL, 2
332807164UL, 1691114891UL, 2543992233UL, 2914780639UL, 1610287145UL, 170059
9697UL, 3185174208UL, 552323208UL, 2367242224UL, 3797136972UL, 3415066418UL
, 2468049249UL, 1677937401UL, 40445671UL, 2886682530UL, 2585715434UL, 19493
2329UL, 2994003812UL, 3099556382UL, 680852222UL, 135838738UL, 1371063256UL,
995454898UL, 3754526418UL, 803635682UL, 634588682UL, 3869250783UL, 2442285
521UL, 1455637058UL, 570621479UL, 2512681851UL, 1220136924UL, 750260121UL,
2909903038UL, 1582019728UL, 955115170UL, 1608265445UL, 2157390890UL, 230367
8604UL, 1568394164UL, 831914289UL, 1971271392UL, 1294799854UL, 1489945167UL
, 442427880UL, 1305083700UL, 1211218668UL, 2380073713UL, 2798736785UL, 2193
524273UL, 3227386915UL, 1636588977UL, 3612937642UL, 435113647UL, 1591761830
UL, 536210039UL, 2475747073UL, 4223795480UL, 1786737271UL, 1444661534UL, 32
49410301UL, 3333695212UL, 4169107188UL, 3280638635UL, 702659930UL, 14441279
70UL, 225340755UL, 2255629368UL, 746584456UL, 3965677674UL, 2671132955UL, 2
080717656UL, 2145343886UL, 3712441197UL, 368422910UL, 1297685674UL, 4076123
901UL, 26214470UL, 2948764826UL, 40503299UL, 1198194334UL, 2100063637UL, 19
66331612UL, 2189582064UL, 2064696934UL, 1797550642UL, 3469793941UL, 2868963
812UL, 851437659UL, 240918534UL, 365060070UL, 3530600064UL, 39695324UL, 175
3898837UL, 1286976449UL, 3131971360UL, 2406485219UL, 3365373704UL, 32241134
03UL, 1651742834UL, 587601940UL, 1574206085UL, 3739575036UL, 1413669616UL,
38172232UL, 293127854UL, 4126190109UL, 1891744061UL, 787878666UL, 456643669
UL, 4228710325UL, 2025132037UL, 1492133135UL, 3122840937UL, 969442079UL, 32
72420439UL, 3836126369UL, 1877655562UL, 2766212758UL, 3867984746UL, 3348077
578UL, 1841216706UL, }, {1676507466UL, 1017841240UL, 2992644565UL, 47693615
8UL, 2468072723UL, 3113105154UL, 1154120402UL, 460889625UL, 1942263502UL, 1
761593999UL, 3020908939UL, 3078194866UL, 310971889UL, 1644896012UL, 3756044
556UL, 3549937583UL, 3710822994UL, 3554313733UL, 2174654326UL, 4251063242UL
, 2340485150UL, 950951909UL, 4288936895UL, 3744348848UL, 706644559UL, 10859
27825UL, 1595992020UL, 3288724966UL, 1367247946UL, 2950094970UL, 3925419886
UL, 2628739022UL, 2528254629UL, 3582224789UL, 3907345559UL, 3373329273UL, 4
255542251UL, 1185418446UL, 4018656113UL, 2854344020UL, 1381160022UL, 364243
8773UL, 4284399225UL, 935780030UL, 4142412144UL, 1263328494UL, 1154237693UL
, 2684443667UL, 3067549398UL, 4253090033UL, 1251034970UL, 1874233020UL, 322
2830495UL, 3866931656UL, 286048055UL, 3146635362UL, 1436483376UL, 282187649
5UL, 3927829532UL, 2648886905UL, 2142862852UL, 1368937545UL, 2647327844UL,
1072219385UL, 2621337706UL, 3543274652UL, 911792564UL, 1204178178UL, 412721
4323UL, 2821691380UL, 3101998294UL, 730811902UL, 1989156224UL, 2872353003UL
, 278290276UL, 1390223786UL, 2657819643UL, 552729795UL, 1736270535UL, 27592
07116UL, 1897013739UL, 3657020278UL, 1387364861UL, 1966588302UL, 1049203087
UL, 486446521UL, 3675999281UL, 714737345UL, 686837530UL, 85509025UL, 360908
9773UL, 2117061768UL, 3935682560UL, 3859508784UL, 4105287041UL, 1808988481U
L, 83680601UL, 1464326680UL, 1657693523UL, 3318062731UL, 1391154023UL, 2344
60119UL, 3551348221UL, 2245244809UL, 3635923821UL, 2814385745UL, 3497626257
UL, 916790795UL, 245338628UL, 2514528380UL, 3711787525UL, 2239286063UL, 105
4058916UL, 3963706010UL, 3176203796UL, 2230543409UL, 2173597546UL, 37867338
92UL, 1396036965UL, 1038764273UL, 2032556038UL, 3216540537UL, 3298170974UL,
1008892557UL, 141155464UL, 1863766055UL, 3931110690UL, 191299053UL, 201913
9711UL, 2409528317UL, 739418419UL, 1377144055UL, 2876702705UL, 3911939673UL
, 1197696462UL, 2814009721UL, 600813233UL, 1535885024UL, 1486280357UL, 3084
650548UL, 2324695947UL, 2293284974UL, 2036339249UL, 3465600153UL, 162444610
8UL, 327866771UL, 3356772175UL, 1826625240UL, 1947102360UL, 3661848193UL, 1
421374867UL, 3228945021UL, 1358646008UL, 1067180174UL, 2190741258UL, 643362
354UL, 109899594UL, 2064362635UL, 3249674888UL, 2165543887UL, 4180291913UL,
1676507466UL, 1017841240UL, 2992644565UL, 476936158UL, 3608467942UL, 31131
05154UL, 1154120402UL, 460889625UL, 1942263502UL, 1862994005UL, 3020908939U
L, 3078194866UL, 310971889UL, 1644896012UL, 693774191UL, 3549937583UL, 3710
822994UL, 3554313733UL, 2174654326UL, 37658897UL, 2340485150UL, 950951909UL
, 4288936895UL, 3744348848UL, 2258231402UL, 1085927825UL, 1595992020UL, 328
8724966UL, 1367247946UL, 3850509554UL, 3925419886UL, 2628739022UL, 25282546
29UL, 3582224789UL, 3124287811UL, 3373329273UL, 4255542251UL, 1185418446UL,
4018656113UL, 1989726178UL, 1381160022UL, 3642438773UL, 4284399225UL, 9357
80030UL, 3622052196UL, 1263328494UL, 1154237693UL, 2684443667UL, 3067549398
UL, 2786224913UL, 1251034970UL, 1874233020UL, 3222830495UL, 3866931656UL, 1
529490307UL, 3146635362UL, 1436483376UL, 2821876495UL, 3927829532UL, 979247
444UL, 2142862852UL, 1368937545UL, 2647327844UL, 1072219385UL, 294065371UL,
3543274652UL, 911792564UL, 1204178178UL, 4127214323UL, 103582737UL, 310199
8294UL, 730811902UL, 1989156224UL, 2872353003UL, 1885087777UL, 1390223786UL
, 2657819643UL, 552729795UL, 1736270535UL, 3325206451UL, 1897013739UL, 3657
020278UL, 1387364861UL, 1966588302UL, 2117065739UL, 486446521UL, 3675999281
UL, 714737345UL, 686837530UL, 3946214694UL, 3609089773UL, 2117061768UL, 393
5682560UL, 3859508784UL, 2916136885UL, 1808988481UL, 83680601UL, 1464326680
UL, 1657693523UL, 3438751781UL, 1391154023UL, 234460119UL, 3551348221UL, 22
45244809UL, 3948410079UL, 2814385745UL, 3497626257UL, 916790795UL, 24533862
8UL, 1767303496UL, 3711787525UL, 2239286063UL, 1054058916UL, 3963706010UL,
4140631909UL, 2230543409UL, 2173597546UL, 3786733892UL, 1396036965UL, 11160
33475UL, 2032556038UL, 3216540537UL, 3298170974UL, 1008892557UL, 667272562U
L, 1863766055UL, 3931110690UL, 191299053UL, 2019139711UL, 272901326UL, 7394
18419UL, 1377144055UL, 2876702705UL, 3911939673UL, 3839312742UL, 2814009721
UL, 600813233UL, 1535885024UL, 1486280357UL, 4256065219UL, 2324695947UL, 22
93284974UL, 2036339249UL, 3465600153UL, 1215859603UL, 327866771UL, 33567721
75UL, 1826625240UL, 1947102360UL, 4240407984UL, 1421374867UL, 3228945021UL,
1358646008UL, 1067180174UL, 4100357988UL, 643362354UL, 109899594UL, 206436
2635UL, 3249674888UL, 2898852084UL, 4180291913UL, 1676507466UL, 1017841240U
L, 2992644565UL, 1569683812UL, 3608467942UL, 3113105154UL, 1154120402UL, 46
0889625UL, 966040649UL, 1862994005UL, 3020908939UL, 3078194866UL, 310971889
UL, 786634113UL, 693774191UL, 3549937583UL, 3710822994UL, 3554313733UL, 157
8429713UL, 37658897UL, 2340485150UL, 950951909UL, 4288936895UL, 2528123823U
L, 2258231402UL, 1085927825UL, 1595992020UL, 3288724966UL, 3544041088UL, 38
50509554UL, 3925419886UL, 2628739022UL, 2528254629UL, 2562145937UL, 3124287
811UL, 3373329273UL, 4255542251UL, 1185418446UL, 3693565710UL, 1989726178UL
, 1381160022UL, 3642438773UL, 4284399225UL, 3271478204UL, 3622052196UL, 126
3328494UL, 1154237693UL, 2684443667UL, 3615401444UL, 2786224913UL, 12510349
70UL, 1874233020UL, 3222830495UL, 2572413057UL, 1529490307UL, 3146635362UL,
1436483376UL, 2821876495UL, 3993894153UL, 979247444UL, 2142862852UL, 13689
37545UL, 2647327844UL, 1353904396UL, 294065371UL, 3543274652UL, 911792564UL
, 1204178178UL, 3165709748UL, 103582737UL, 3101998294UL, 730811902UL, 19891
56224UL, 893293786UL, 1885087777UL, 1390223786UL, 2657819643UL, 552729795UL
, 3388458110UL, 3325206451UL, 1897013739UL, 3657020278UL, 1387364861UL, 302
5318046UL, 2117065739UL, 486446521UL, 3675999281UL, 714737345UL, 2085926890
UL, 3946214694UL, 3609089773UL, 2117061768UL, 3935682560UL, 868009118UL, 29
16136885UL, 1808988481UL, 83680601UL, 1464326680UL, 797410789UL, 3438751781
UL, 1391154023UL, 234460119UL, 3551348221UL, 4068940987UL, 3948410079UL, 28
14385745UL, 3497626257UL, 916790795UL, 3722456098UL, 1767303496UL, 37117875
25UL, 2239286063UL, 1054058916UL, 2030352819UL, 4140631909UL, 2230543409UL,
2173597546UL, 3786733892UL, 3211336683UL, 1116033475UL, 2032556038UL, 3216
540537UL, 3298170974UL, 2589589144UL, 667272562UL, 1863766055UL, 3931110690
UL, 191299053UL, 1139480458UL, 272901326UL, 739418419UL, 1377144055UL, 2876
702705UL, 1954361769UL, 3839312742UL, 2814009721UL, 600813233UL, 1535885024
UL, 3587775605UL, 4256065219UL, 2324695947UL, 2293284974UL, 2036339249UL, 1
534849280UL, 1215859603UL, 327866771UL, 3356772175UL, 1826625240UL, 7203726
69UL, 4240407984UL, 1421374867UL, 3228945021UL, 1358646008UL, 3409069246UL,
4100357988UL, 643362354UL, 109899594UL, 2064362635UL, 4243434294UL, 289885
2084UL, 4180291913UL, 1676507466UL, 1017841240UL, 3243922356UL, 1569683812U
L, 3608467942UL, 3113105154UL, 1154120402UL, 1479311403UL, 966040649UL, 186
2994005UL, 3020908939UL, 3078194866UL, 1556392996UL, 786634113UL, 693774191
UL, 3549937583UL, 3710822994UL, 920664071UL, 1578429713UL, 37658897UL, 2340
485150UL, 950951909UL, 740197415UL, 2528123823UL, 2258231402UL, 1085927825U
L, 1595992020UL, 2580760267UL, 3544041088UL, 3850509554UL, 3925419886UL, 26
28739022UL, 3867556156UL, 2562145937UL, 3124287811UL, 3373329273UL, 4255542
251UL, 3185271749UL, 3693565710UL, 1989726178UL, 1381160022UL, 3642438773UL
, 3042165367UL, 3271478204UL, 3622052196UL, 1263328494UL, 1154237693UL, 101
6814036UL, 3615401444UL, 2786224913UL, 1251034970UL, 1874233020UL, 29560869
71UL, 2572413057UL, 1529490307UL, 3146635362UL, 1436483376UL, 1513970396UL,
3993894153UL, 979247444UL, 2142862852UL, 1368937545UL, 3275665128UL, 13539
04396UL, 294065371UL, 3543274652UL, 911792564UL, 2209636872UL, 3165709748UL
, 103582737UL, 3101998294UL, 730811902UL, 965151434UL, 893293786UL, 1885087
777UL, 1390223786UL, 2657819643UL, 3278634059UL, 3388458110UL, 3325206451UL
, 1897013739UL, 3657020278UL, 4293473749UL, 3025318046UL, 2117065739UL, 486
446521UL, 3675999281UL, 620561205UL, 2085926890UL, 3946214694UL, 3609089773
UL, 2117061768UL, 163384588UL, 868009118UL, 2916136885UL, 1808988481UL, 836
80601UL, 10243015UL, 797410789UL, 3438751781UL, 1391154023UL, 234460119UL,
1278218413UL, 4068940987UL, 3948410079UL, 2814385745UL, 3497626257UL, 12332
72798UL, 3722456098UL, 1767303496UL, 3711787525UL, 2239286063UL, 3968895688
UL, 2030352819UL, 4140631909UL, 2230543409UL, 2173597546UL, 2866251044UL, 3
211336683UL, 1116033475UL, 2032556038UL, 3216540537UL, 4233849723UL, 258958
9144UL, 667272562UL, 1863766055UL, 3931110690UL, 2468422423UL, 1139480458UL
, 272901326UL, 739418419UL, 1377144055UL, 4240143411UL, 1954361769UL, 38393
12742UL, 2814009721UL, 600813233UL, 3976840004UL, 3587775605UL, 4256065219U
L, 2324695947UL, 2293284974UL, 437604123UL, 1534849280UL, 1215859603UL, 327
866771UL, 3356772175UL, 2757237699UL, 720372669UL, 4240407984UL, 1421374867
UL, 3228945021UL, 3284801305UL, 3409069246UL, 4100357988UL, 643362354UL, 10
9899594UL, 1301585321UL, 2528806870UL, 1838904064UL, 448772403UL, 109784974
0UL, 1899994097UL, 618309123UL, 1911948510UL, 2309256224UL, 1861398151UL, 9
05306403UL, 1067595802UL, 36868624UL, 3780886191UL, 835126206UL, 3190251977
UL, 2672497726UL, 2085944002UL, 2912993968UL, 2493776706UL, 667136329UL, 14
74890786UL, 2383346554UL, 943528949UL, 3376706013UL, 2495573574UL, 14495634
5UL, 793159960UL, 1591274917UL, 477107637UL, 1383815442UL, 67384899UL, 2355
242218UL, 1687409818UL, 3801093871UL, 2108217811UL, 3455908733UL, 417216079
7UL, 3935534685UL, 631067839UL, 1187677548UL, 2280856137UL, 3020767646UL, 2
063176246UL, 3736904984UL, 2952933848UL, 2975164686UL, 4144473303UL, 346709
77UL, 1250976509UL, 3484166554UL, 1532744745UL, 225700994UL, 1878713627UL,
2122358980UL, 1456610194UL, 2917522161UL, 2818947075UL, 102678939UL, 537438
58UL, 2095250656UL, 4023979225UL, 3094092874UL, 4128760696UL, 3411610028UL,
3020200609UL, 2225866341UL, 586320946UL, 63813522UL, 1238216159UL, 2825692
263UL, 2169937231UL, 3298517640UL, 1542128261UL, 2205544184UL, 1258655704UL
, 2629012083UL, 4113650203UL, 3198617867UL, 2742310794UL, 3372657381UL, 311
5904410UL, 1948638822UL, 1123521744UL, 1080429281UL, 4086706732UL, 41426932
11UL, 817377147UL, 2570194641UL, 26001503UL, 2861456160UL, 4185725555UL, 25
73003804UL, 1618628779UL, 2588489212UL, 3996192609UL, 1555844274UL, 1003123
505UL, 1326350123UL, 1130583849UL, 3017128756UL, 74119042UL, 4041266437UL,
1938014170UL, 3528465794UL, 4203969698UL, 1913054398UL, 3617979809UL, 22188
10167UL, 2453899816UL, 1997423206UL, 477446533UL, 303090065UL, 757937082UL,
1523238256UL, 3140505311UL, 1422588701UL, 3642014639UL, 1740624195UL, 1276
017154UL, 3072526193UL, 3675105122UL, 1335122682UL, 4080595263UL, 230851942
0UL, 3299182769UL, 1461978532UL, 3098694217UL, 2982399822UL, 3088698511UL,
586759229UL, 3548750902UL, 1449857891UL, 2866451663UL, 2525162286UL, 572946
02UL, 4107991297UL, 1214672265UL, 2940391280UL, 4285346034UL, 3338216759UL,
737207923UL, 4264163846UL, 59219141UL, 2300024654UL, 1876616814UL, 1976543
605UL, 783571061UL, 1724699622UL, 1967524469UL, 1650309916UL, 3322257631UL,
3975521122UL, 273342162UL, 1156754241UL, 185315896UL, 3368133921UL, 663146
55UL, 4153777915UL, 3519901897UL, }, {3672467167UL, 68684525UL, 1738833632U
L, 3081329135UL, 2583806115UL, 2291130512UL, 503032614UL, 3658059597UL, 571
493931UL, 685537959UL, 3498787788UL, 422428426UL, 3879256913UL, 1173158320U
L, 4000800121UL, 298972869UL, 1718342816UL, 2541691685UL, 2490502642UL, 232
1452806UL, 4223212804UL, 1812334632UL, 3717655725UL, 4238191852UL, 30013071
65UL, 2621896355UL, 2572404999UL, 3590094954UL, 760765206UL, 2293618001UL,
1392353032UL, 1733137169UL, 2674005018UL, 4067961151UL, 1505710487UL, 45107
8217UL, 2591688848UL, 12635611UL, 507045428UL, 694822241UL, 1789383090UL, 1
140183890UL, 1720695967UL, 1994318191UL, 3340349873UL, 2793804971UL, 105443
3135UL, 2345087879UL, 3179939285UL, 1651968615UL, 1793223686UL, 1055357758U
L, 914271617UL, 483007580UL, 2127727816UL, 2754998083UL, 3179053982UL, 5984
42002UL, 1950227301UL, 213053613UL, 3566888111UL, 2832258993UL, 4260365359U
L, 443662829UL, 1706542890UL, 3852730296UL, 3643260763UL, 2163607277UL, 181
2905006UL, 171529637UL, 215187467UL, 2369406909UL, 1929000706UL, 2572441025
UL, 2133955541UL, 810692262UL, 1337974799UL, 4030350704UL, 2159178715UL, 37
69451556UL, 1026825278UL, 593628480UL, 1817383139UL, 878832429UL, 225387635
0UL, 203612980UL, 2102950440UL, 3407143936UL, 1912362251UL, 1595387637UL, 2
827580539UL, 305467658UL, 3292706746UL, 44135525UL, 4001933553UL, 369734308
9UL, 760470915UL, 587414402UL, 1419378814UL, 2852774010UL, 3891626781UL, 27
57016765UL, 1090707384UL, 3997074427UL, 1047182100UL, 2855539022UL, 3622915
9UL, 1591415533UL, 3471572739UL, 1237952140UL, 2614469314UL, 213338525UL, 8
86212578UL, 2620301943UL, 713590207UL, 2430496777UL, 1198164420UL, 26448416
98UL, 3654164701UL, 36283572UL, 1461695896UL, 1770331341UL, 1641501876UL, 3
470919184UL, 3181021559UL, 3053795110UL, 3533531372UL, 3134337355UL, 668308
383UL, 388340999UL, 3221275220UL, 1589659138UL, 294382235UL, 1447443579UL,
690177534UL, 1799726917UL, 2838977761UL, 4172949119UL, 2360858031UL, 159385
920UL, 2248389027UL, 1790015671UL, 3925738275UL, 1049918544UL, 4107349511UL
, 1619955951UL, 4188275966UL, 1672572975UL, 2672697497UL, 1863413666UL, 747
724021UL, 4037561738UL, 1605940213UL, 445253292UL, 3362434828UL, 610898209U
L, 1473244091UL, 735444769UL, 1540599852UL, 2449351720UL, 1032410949UL, 367
2467167UL, 68684525UL, 1738833632UL, 3081329135UL, 519684794UL, 2291130512U
L, 503032614UL, 3658059597UL, 571493931UL, 2400186105UL, 3498787788UL, 4224
28426UL, 3879256913UL, 1173158320UL, 4120704752UL, 298972869UL, 1718342816U
L, 2541691685UL, 2490502642UL, 1686027891UL, 4223212804UL, 1812334632UL, 37
17655725UL, 4238191852UL, 642431972UL, 2621896355UL, 2572404999UL, 35900949
54UL, 760765206UL, 2949609717UL, 1392353032UL, 1733137169UL, 2674005018UL,
4067961151UL, 1526077846UL, 451078217UL, 2591688848UL, 12635611UL, 50704542
8UL, 2417951415UL, 1789383090UL, 1140183890UL, 1720695967UL, 1994318191UL,
3465605863UL, 2793804971UL, 1054433135UL, 2345087879UL, 3179939285UL, 30792
97626UL, 1793223686UL, 1055357758UL, 914271617UL, 483007580UL, 306802527UL,
2754998083UL, 3179053982UL, 598442002UL, 1950227301UL, 2473418737UL, 35668
88111UL, 2832258993UL, 4260365359UL, 443662829UL, 2097776414UL, 3852730296U
L, 3643260763UL, 2163607277UL, 1812905006UL, 3957721904UL, 215187467UL, 236
9406909UL, 1929000706UL, 2572441025UL, 3779486126UL, 810692262UL, 133797479
9UL, 4030350704UL, 2159178715UL, 1127012865UL, 1026825278UL, 593628480UL, 1
817383139UL, 878832429UL, 361018423UL, 203612980UL, 2102950440UL, 340714393
6UL, 1912362251UL, 1475218277UL, 2827580539UL, 305467658UL, 3292706746UL, 4
4135525UL, 1900092336UL, 3697343089UL, 760470915UL, 587414402UL, 1419378814
UL, 343303227UL, 3891626781UL, 2757016765UL, 1090707384UL, 3997074427UL, 74
5490961UL, 2855539022UL, 36229159UL, 1591415533UL, 3471572739UL, 3920625546
UL, 2614469314UL, 213338525UL, 886212578UL, 2620301943UL, 827771411UL, 2430
496777UL, 1198164420UL, 2644841698UL, 3654164701UL, 2747674190UL, 146169589
6UL, 1770331341UL, 1641501876UL, 3470919184UL, 919857376UL, 3053795110UL, 3
533531372UL, 3134337355UL, 668308383UL, 201138876UL, 3221275220UL, 15896591
38UL, 294382235UL, 1447443579UL, 4211579707UL, 1799726917UL, 2838977761UL,
4172949119UL, 2360858031UL, 416103844UL, 2248389027UL, 1790015671UL, 392573
8275UL, 1049918544UL, 3481887924UL, 1619955951UL, 4188275966UL, 1672572975U
L, 2672697497UL, 564854400UL, 747724021UL, 4037561738UL, 1605940213UL, 4452
53292UL, 604900912UL, 610898209UL, 1473244091UL, 735444769UL, 1540599852UL,
3036173307UL, 1032410949UL, 3672467167UL, 68684525UL, 1738833632UL, 973022
696UL, 519684794UL, 2291130512UL, 503032614UL, 3658059597UL, 1500301452UL,
2400186105UL, 3498787788UL, 422428426UL, 3879256913UL, 3923611748UL, 412070
4752UL, 298972869UL, 1718342816UL, 2541691685UL, 2323881484UL, 1686027891UL
, 4223212804UL, 1812334632UL, 3717655725UL, 2109094458UL, 642431972UL, 2621
896355UL, 2572404999UL, 3590094954UL, 1837882537UL, 2949609717UL, 139235303
2UL, 1733137169UL, 2674005018UL, 3252348987UL, 1526077846UL, 451078217UL, 2
591688848UL, 12635611UL, 3971261781UL, 2417951415UL, 1789383090UL, 11401838
90UL, 1720695967UL, 2906966040UL, 3465605863UL, 2793804971UL, 1054433135UL,
2345087879UL, 915518921UL, 3079297626UL, 1793223686UL, 1055357758UL, 91427
1617UL, 791633499UL, 306802527UL, 2754998083UL, 3179053982UL, 598442002UL,
324402573UL, 2473418737UL, 3566888111UL, 2832258993UL, 4260365359UL, 216804
6398UL, 2097776414UL, 3852730296UL, 3643260763UL, 2163607277UL, 2595175979U
L, 3957721904UL, 215187467UL, 2369406909UL, 1929000706UL, 657446369UL, 3779
486126UL, 810692262UL, 1337974799UL, 4030350704UL, 1865557469UL, 1127012865
UL, 1026825278UL, 593628480UL, 1817383139UL, 3414354529UL, 361018423UL, 203
612980UL, 2102950440UL, 3407143936UL, 1739372987UL, 1475218277UL, 282758053
9UL, 305467658UL, 3292706746UL, 825045562UL, 1900092336UL, 3697343089UL, 76
0470915UL, 587414402UL, 2000637694UL, 343303227UL, 3891626781UL, 2757016765
UL, 1090707384UL, 4015377800UL, 745490961UL, 2855539022UL, 36229159UL, 1591
415533UL, 2208656873UL, 3920625546UL, 2614469314UL, 213338525UL, 886212578U
L, 2729976209UL, 827771411UL, 2430496777UL, 1198164420UL, 2644841698UL, 192
2667440UL, 2747674190UL, 1461695896UL, 1770331341UL, 1641501876UL, 35753531
1UL, 919857376UL, 3053795110UL, 3533531372UL, 3134337355UL, 1004072597UL, 2
01138876UL, 3221275220UL, 1589659138UL, 294382235UL, 1148950143UL, 42115797
07UL, 1799726917UL, 2838977761UL, 4172949119UL, 892664404UL, 416103844UL, 2
248389027UL, 1790015671UL, 3925738275UL, 2612357890UL, 3481887924UL, 161995
5951UL, 4188275966UL, 1672572975UL, 2005534713UL, 564854400UL, 747724021UL,
4037561738UL, 1605940213UL, 2620990454UL, 604900912UL, 610898209UL, 147324
4091UL, 735444769UL, 3571225334UL, 3036173307UL, 1032410949UL, 3672467167UL
, 68684525UL, 3327351604UL, 973022696UL, 519684794UL, 2291130512UL, 5030326
14UL, 3814902238UL, 1500301452UL, 2400186105UL, 3498787788UL, 422428426UL,
1756753750UL, 3923611748UL, 4120704752UL, 298972869UL, 1718342816UL, 652903
081UL, 2323881484UL, 1686027891UL, 4223212804UL, 1812334632UL, 1599640566UL
, 2109094458UL, 642431972UL, 2621896355UL, 2572404999UL, 1668409355UL, 1837
882537UL, 2949609717UL, 1392353032UL, 1733137169UL, 3691709793UL, 325234898
7UL, 1526077846UL, 451078217UL, 2591688848UL, 3353622601UL, 3971261781UL, 2
417951415UL, 1789383090UL, 1140183890UL, 4113853791UL, 2906966040UL, 346560
5863UL, 2793804971UL, 1054433135UL, 2195882948UL, 915518921UL, 3079297626UL
, 1793223686UL, 1055357758UL, 898713552UL, 791633499UL, 306802527UL, 275499
8083UL, 3179053982UL, 2469350088UL, 324402573UL, 2473418737UL, 3566888111UL
, 2832258993UL, 1377718274UL, 2168046398UL, 2097776414UL, 3852730296UL, 364
3260763UL, 3492388484UL, 2595175979UL, 3957721904UL, 215187467UL, 236940690
9UL, 4243449339UL, 657446369UL, 3779486126UL, 810692262UL, 1337974799UL, 39
60230785UL, 1865557469UL, 1127012865UL, 1026825278UL, 593628480UL, 73279331
2UL, 3414354529UL, 361018423UL, 203612980UL, 2102950440UL, 2401792405UL, 17
39372987UL, 1475218277UL, 2827580539UL, 305467658UL, 2454275289UL, 82504556
2UL, 1900092336UL, 3697343089UL, 760470915UL, 2146882409UL, 2000637694UL, 3
43303227UL, 3891626781UL, 2757016765UL, 3997473261UL, 4015377800UL, 7454909
61UL, 2855539022UL, 36229159UL, 2375394427UL, 2208656873UL, 3920625546UL, 2
614469314UL, 213338525UL, 2055366274UL, 2729976209UL, 827771411UL, 24304967
77UL, 1198164420UL, 1789631187UL, 1922667440UL, 2747674190UL, 1461695896UL,
1770331341UL, 4284442852UL, 357535311UL, 919857376UL, 3053795110UL, 353353
1372UL, 2124270060UL, 1004072597UL, 201138876UL, 3221275220UL, 1589659138UL
, 1418386120UL, 1148950143UL, 4211579707UL, 1799726917UL, 2838977761UL, 354
0708069UL, 892664404UL, 416103844UL, 2248389027UL, 1790015671UL, 3936883UL,
2612357890UL, 3481887924UL, 1619955951UL, 4188275966UL, 2963623483UL, 2005
534713UL, 564854400UL, 747724021UL, 4037561738UL, 3431155922UL, 2620990454U
L, 604900912UL, 610898209UL, 1473244091UL, 3880001339UL, 2879060316UL, 3300
897679UL, 3960972039UL, 3201086624UL, 3814462934UL, 3426650044UL, 193088163
2UL, 1981178788UL, 2956279691UL, 4272406256UL, 372705521UL, 1359389771UL, 1
590302979UL, 3940206208UL, 3817999127UL, 2527835456UL, 2739078164UL, 716997
849UL, 3235607043UL, 2550297745UL, 3688700200UL, 354502605UL, 2285793656UL,
2339138034UL, 3912354142UL, 2262255668UL, 469322622UL, 1319943359UL, 19161
01235UL, 200441823UL, 509436982UL, 2160284593UL, 1687919695UL, 4153615582UL
, 495735041UL, 3694469424UL, 2086893117UL, 4223008799UL, 105344742UL, 16980
33424UL, 1149223145UL, 4183918790UL, 4176151950UL, 415739351UL, 817762972UL
, 3768072560UL, 1931430949UL, 2698979439UL, 3481477932UL, 1994322914UL, 407
8299950UL, 1268233995UL, 3254069145UL, 91029129UL, 498234704UL, 1636613942U
L, 3710087092UL, 3876816560UL, 3510446387UL, 3870169008UL, 1370156410UL, 24
42498047UL, 2324396523UL, 1258730334UL, 621954739UL, 1053015373UL, 49182071
7UL, 3386515432UL, 2203703266UL, 120167176UL, 2383669740UL, 1038666440UL, 2
927342870UL, 3583197824UL, 1236241846UL, 2474675929UL, 679052891UL, 2451259
584UL, 2177706146UL, 606842882UL, 3546980104UL, 2289281509UL, 353873434UL,
2041926837UL, 1238346748UL, 2729109726UL, 2843938395UL, 2938124210UL, 25544
43866UL, 1494477920UL, 693378319UL, 2020963566UL, 2000385949UL, 3744098787U
L, 650307220UL, 2631327075UL, 1529128757UL, 595871428UL, 3206666562UL, 4580
62987UL, 875238192UL, 3729317374UL, 1368843921UL, 3478430230UL, 3234384578U
L, 3232435428UL, 321359326UL, 994274524UL, 361184397UL, 4285497594UL, 91526
3578UL, 1486882838UL, 9988613UL, 829077170UL, 677216046UL, 4141828204UL, 16
5804609UL, 1086678519UL, 2933434608UL, 1351662802UL, 2640085040UL, 26115029
32UL, 2033698714UL, 2008873254UL, 3995557835UL, 1020873906UL, 67873555UL, 2
230337823UL, 1263800417UL, 1148712155UL, 3985159589UL, 2979503513UL, 285471
4997UL, 1539343345UL, 2751484352UL, 1569100732UL, 2020758949UL, 2126757134U
L, 3426641899UL, 2808587825UL, 1953320148UL, 1096398464UL, 1502907172UL, 37
51230087UL, 765557661UL, 765290990UL, 3056075500UL, 2040620632UL, 422573751
UL, 3613558930UL, 1741145769UL, 273531216UL, 837238736UL, 494297893UL, 2903
251124UL, 1636782182UL, 4256592784UL, 3652746656UL, 4258393217UL, }, | | {850664906UL, 2293210629UL, 1517805917UL, 1215500405UL, 1612415445UL, 64538
8200UL, 824349799UL, 3517232886UL, 4075591755UL, 3089899292UL, 4249786064UL
, 3811424903UL, 1100783479UL, 53649761UL, 2817264826UL, 3159462529UL, 16548
48550UL, 950025444UL, 3095510002UL, 4080567211UL, 4111078399UL, 3241719305U
L, 2788212779UL, 4256963770UL, 2426893717UL, 4190211142UL, 1420776905UL, 37
80537969UL, 1102912875UL, 1657948873UL, 3354905256UL, 2519610308UL, 5157776
63UL, 3396785394UL, 1832603711UL, 1154211550UL, 1915690212UL, 1933919046UL,
789578337UL, 337961173UL, 1359089498UL, 2249086205UL, 3417955173UL, 862571
348UL, 528120760UL, 1265685672UL, 1970052076UL, 3585976752UL, 3645339918UL,
312171257UL, 1360991400UL, 1994321680UL, 2327168468UL, 2540437053UL, 11804
83641UL, 2217962701UL, 182726833UL, 590204372UL, 1904496495UL, 2545607041UL
, 3697978033UL, 1084030545UL, 3397906968UL, 2192325323UL, 2704204176UL, 106
9092002UL, 2364406907UL, 1578647245UL, 3561974633UL, 3437665426UL, 14641273
05UL, 1616628807UL, 2243114101UL, 3639967880UL, 1702613633UL, 2437350057UL,
39991274UL, 2024323584UL, 3795072940UL, 3604530798UL, 443099203UL, 6435362
12UL, 1919517328UL, 3931285769UL, 427935569UL, 276421624UL, 2492081750UL, 2
62729512UL, 3088549877UL, 2922650665UL, 1816283755UL, 4246096489UL, 8425759
14UL, 1460435650UL, 3050522190UL, 2640849794UL, 3697925816UL, 3465779075UL,
3856929655UL, 1365559780UL, 2897029415UL, 2747033756UL, 3611830629UL, 1891
542518UL, 1897590206UL, 437451803UL, 677924906UL, 123809117UL, 3940574372UL
, 687640291UL, 3488484529UL, 470218446UL, 1092571016UL, 1537938503UL, 10733
23937UL, 611300083UL, 3809285994UL, 3975678726UL, 925845389UL, 2514775760UL
, 2859302390UL, 2761919483UL, 993285307UL, 164095287UL, 3736193671UL, 20789
46336UL, 1418537059UL, 1202525920UL, 4234029440UL, 1313593624UL, 2484428922
UL, 1833969372UL, 661495122UL, 2217907395UL, 2795045321UL, 2950835531UL, 14
02379354UL, 351314168UL, 1902476749UL, 1914974334UL, 2873973176UL, 13212036
03UL, 3316118265UL, 3282193947UL, 1342191737UL, 793441242UL, 3281524559UL,
296088733UL, 487851702UL, 712098215UL, 1388727135UL, 1705533557UL, 35578002
92UL, 399729516UL, 1355829467UL, 291276309UL, 421164833UL, 1318404599UL, 20
64519128UL, 1161612642UL, 2076623594UL, 850664906UL, 2293210629UL, 15178059
17UL, 1215500405UL, 3847487204UL, 645388200UL, 824349799UL, 3517232886UL, 4
075591755UL, 2755872609UL, 4249786064UL, 3811424903UL, 1100783479UL, 536497
61UL, 1417544262UL, 3159462529UL, 1654848550UL, 950025444UL, 3095510002UL,
1908900347UL, 4111078399UL, 3241719305UL, 2788212779UL, 4256963770UL, 37502
58343UL, 4190211142UL, 1420776905UL, 3780537969UL, 1102912875UL, 1690550UL,
3354905256UL, 2519610308UL, 515777663UL, 3396785394UL, 2658162202UL, 11542
11550UL, 1915690212UL, 1933919046UL, 789578337UL, 189880016UL, 1359089498UL
, 2249086205UL, 3417955173UL, 862571348UL, 998719835UL, 1265685672UL, 19700
52076UL, 3585976752UL, 3645339918UL, 2973042959UL, 1360991400UL, 1994321680
UL, 2327168468UL, 2540437053UL, 2283905032UL, 2217962701UL, 182726833UL, 59
0204372UL, 1904496495UL, 110719262UL, 3697978033UL, 1084030545UL, 339790696
8UL, 2192325323UL, 4133333579UL, 1069092002UL, 2364406907UL, 1578647245UL,
3561974633UL, 3629845331UL, 1464127305UL, 1616628807UL, 2243114101UL, 36399
67880UL, 3256744141UL, 2437350057UL, 39991274UL, 2024323584UL, 3795072940UL
, 1024703328UL, 443099203UL, 643536212UL, 1919517328UL, 3931285769UL, 27551
67056UL, 276421624UL, 2492081750UL, 262729512UL, 3088549877UL, 2817867653UL
, 1816283755UL, 4246096489UL, 842575914UL, 1460435650UL, 2276077438UL, 2640
849794UL, 3697925816UL, 3465779075UL, 3856929655UL, 130551477UL, 2897029415
UL, 2747033756UL, 3611830629UL, 1891542518UL, 804565809UL, 437451803UL, 677
924906UL, 123809117UL, 3940574372UL, 2446610749UL, 3488484529UL, 470218446U
L, 1092571016UL, 1537938503UL, 1502147484UL, 611300083UL, 3809285994UL, 397
5678726UL, 925845389UL, 872826112UL, 2859302390UL, 2761919483UL, 993285307U
L, 164095287UL, 3901654538UL, 2078946336UL, 1418537059UL, 1202525920UL, 423
4029440UL, 704759480UL, 2484428922UL, 1833969372UL, 661495122UL, 2217907395
UL, 3287413716UL, 2950835531UL, 1402379354UL, 351314168UL, 1902476749UL, 20
33316109UL, 2873973176UL, 1321203603UL, 3316118265UL, 3282193947UL, 1316780
684UL, 793441242UL, 3281524559UL, 296088733UL, 487851702UL, 314311643UL, 13
88727135UL, 1705533557UL, 3557800292UL, 399729516UL, 1660074989UL, 29127630
9UL, 421164833UL, 1318404599UL, 2064519128UL, 3156334112UL, 2076623594UL, 8
50664906UL, 2293210629UL, 1517805917UL, 335452425UL, 3847487204UL, 64538820
0UL, 824349799UL, 3517232886UL, 954487767UL, 2755872609UL, 4249786064UL, 38
11424903UL, 1100783479UL, 3408594583UL, 1417544262UL, 3159462529UL, 1654848
550UL, 950025444UL, 324339737UL, 1908900347UL, 4111078399UL, 3241719305UL,
2788212779UL, 1890540205UL, 3750258343UL, 4190211142UL, 1420776905UL, 37805
37969UL, 3716648585UL, 1690550UL, 3354905256UL, 2519610308UL, 515777663UL,
3758156132UL, 2658162202UL, 1154211550UL, 1915690212UL, 1933919046UL, 84414
9171UL, 189880016UL, 1359089498UL, 2249086205UL, 3417955173UL, 1031812215UL
, 998719835UL, 1265685672UL, 1970052076UL, 3585976752UL, 3174204115UL, 2973
042959UL, 1360991400UL, 1994321680UL, 2327168468UL, 714016907UL, 2283905032
UL, 2217962701UL, 182726833UL, 590204372UL, 2151450260UL, 110719262UL, 3697
978033UL, 1084030545UL, 3397906968UL, 767772303UL, 4133333579UL, 1069092002
UL, 2364406907UL, 1578647245UL, 42955292UL, 3629845331UL, 1464127305UL, 161
6628807UL, 2243114101UL, 3222189776UL, 3256744141UL, 2437350057UL, 39991274
UL, 2024323584UL, 3142424684UL, 1024703328UL, 443099203UL, 643536212UL, 191
9517328UL, 918511196UL, 2755167056UL, 276421624UL, 2492081750UL, 262729512U
L, 4246877536UL, 2817867653UL, 1816283755UL, 4246096489UL, 842575914UL, 142
5765936UL, 2276077438UL, 2640849794UL, 3697925816UL, 3465779075UL, 14917025
26UL, 130551477UL, 2897029415UL, 2747033756UL, 3611830629UL, 1844578694UL,
804565809UL, 437451803UL, 677924906UL, 123809117UL, 3419189841UL, 244661074
9UL, 3488484529UL, 470218446UL, 1092571016UL, 3272535988UL, 1502147484UL, 6
11300083UL, 3809285994UL, 3975678726UL, 2853681168UL, 872826112UL, 28593023
90UL, 2761919483UL, 993285307UL, 1434560128UL, 3901654538UL, 2078946336UL,
1418537059UL, 1202525920UL, 2530097881UL, 704759480UL, 2484428922UL, 183396
9372UL, 661495122UL, 503878844UL, 3287413716UL, 2950835531UL, 1402379354UL,
351314168UL, 4131886119UL, 2033316109UL, 2873973176UL, 1321203603UL, 33161
18265UL, 237900321UL, 1316780684UL, 793441242UL, 3281524559UL, 296088733UL,
1730738847UL, 314311643UL, 1388727135UL, 1705533557UL, 3557800292UL, 15538
35665UL, 1660074989UL, 291276309UL, 421164833UL, 1318404599UL, 964731488UL,
3156334112UL, 2076623594UL, 850664906UL, 2293210629UL, 1105350579UL, 33545
2425UL, 3847487204UL, 645388200UL, 824349799UL, 2789953706UL, 954487767UL,
2755872609UL, 4249786064UL, 3811424903UL, 3937839949UL, 3408594583UL, 14175
44262UL, 3159462529UL, 1654848550UL, 624060530UL, 324339737UL, 1908900347UL
, 4111078399UL, 3241719305UL, 2294919498UL, 1890540205UL, 3750258343UL, 419
0211142UL, 1420776905UL, 2279133729UL, 3716648585UL, 1690550UL, 3354905256U
L, 2519610308UL, 3563975602UL, 3758156132UL, 2658162202UL, 1154211550UL, 19
15690212UL, 3505586122UL, 844149171UL, 189880016UL, 1359089498UL, 224908620
5UL, 2389487504UL, 1031812215UL, 998719835UL, 1265685672UL, 1970052076UL, 2
798611919UL, 3174204115UL, 2973042959UL, 1360991400UL, 1994321680UL, 168413
4678UL, 714016907UL, 2283905032UL, 2217962701UL, 182726833UL, 1734988742UL,
2151450260UL, 110719262UL, 3697978033UL, 1084030545UL, 159906818UL, 767772
303UL, 4133333579UL, 1069092002UL, 2364406907UL, 1290801202UL, 42955292UL,
3629845331UL, 1464127305UL, 1616628807UL, 987794861UL, 3222189776UL, 325674
4141UL, 2437350057UL, 39991274UL, 3644076751UL, 3142424684UL, 1024703328UL,
443099203UL, 643536212UL, 1487589384UL, 918511196UL, 2755167056UL, 2764216
24UL, 2492081750UL, 137688638UL, 4246877536UL, 2817867653UL, 1816283755UL,
4246096489UL, 1518475380UL, 1425765936UL, 2276077438UL, 2640849794UL, 36979
25816UL, 4226506771UL, 1491702526UL, 130551477UL, 2897029415UL, 2747033756U
L, 2033599579UL, 1844578694UL, 804565809UL, 437451803UL, 677924906UL, 27490
65512UL, 3419189841UL, 2446610749UL, 3488484529UL, 470218446UL, 290444026UL
, 3272535988UL, 1502147484UL, 611300083UL, 3809285994UL, 2546040767UL, 2853
681168UL, 872826112UL, 2859302390UL, 2761919483UL, 4097961150UL, 1434560128
UL, 3901654538UL, 2078946336UL, 1418537059UL, 2725734455UL, 2530097881UL, 7
04759480UL, 2484428922UL, 1833969372UL, 3999408333UL, 503878844UL, 32874137
16UL, 2950835531UL, 1402379354UL, 3861442503UL, 4131886119UL, 2033316109UL,
2873973176UL, 1321203603UL, 1267331405UL, 237900321UL, 1316780684UL, 79344
1242UL, 3281524559UL, 1273427916UL, 1730738847UL, 314311643UL, 1388727135UL
, 1705533557UL, 1474310231UL, 1553835665UL, 1660074989UL, 291276309UL, 4211
64833UL, 3884815658UL, 3088049345UL, 3307042227UL, 3228948601UL, 1717605083
UL, 1864502063UL, 3799516572UL, 2372822470UL, 2691586476UL, 1172840854UL, 1
577099080UL, 870101866UL, 2139291021UL, 406996656UL, 255568268UL, 897760202
UL, 674745664UL, 885214361UL, 3753233375UL, 3015215223UL, 1711461259UL, 324
1363282UL, 2125360928UL, 2493601640UL, 2350228245UL, 3434627328UL, 20956429
63UL, 3360932494UL, 3287396242UL, 4070512427UL, 3415702664UL, 1958354224UL,
3280206940UL, 3929504236UL, 3390499817UL, 4144225735UL, 3621750606UL, 3205
006592UL, 3495743785UL, 269239326UL, 2181299371UL, 2898796651UL, 2613623219
UL, 3988711298UL, 2162437858UL, 949553433UL, 3289670000UL, 3559525307UL, 33
66925567UL, 2112148665UL, 955626393UL, 1790865381UL, 699223558UL, 388958430
1UL, 1020750250UL, 4105283899UL, 2295851818UL, 4045668915UL, 2224770025UL,
766386910UL, 4265157386UL, 89139307UL, 2099710177UL, 1012450874UL, 18754924
46UL, 1927399417UL, 767450812UL, 654474783UL, 4265293038UL, 4041215389UL, 4
102336947UL, 4263617328UL, 2135826340UL, 2317231535UL, 3773895729UL, 403151
111UL, 1400693138UL, 4255050194UL, 755369466UL, 2325764302UL, 2617301159UL,
4165707294UL, 1206304709UL, 2415645397UL, 4276004841UL, 1457022279UL, 6626
60652UL, 795140282UL, 828519889UL, 805830562UL, 1179976369UL, 2212548232UL,
755708248UL, 1034682071UL, 899950902UL, 1906046264UL, 1861009040UL, 310711
525UL, 920739741UL, 2322414272UL, 3179236470UL, 81822135UL, 4111390320UL, 1
800166783UL, 112253014UL, 688771939UL, 1050990794UL, 3124647483UL, 28705217
1UL, 1363630156UL, 3447798279UL, 1405733552UL, 3075862538UL, 1682808202UL,
1595154222UL, 1173705692UL, 680713285UL, 2748212230UL, 568610527UL, 3434965
538UL, 1114942930UL, 2835858745UL, 2575992250UL, 3243355150UL, 2127580225UL
, 1855934450UL, 3915941751UL, 2228679809UL, 1514780124UL, 1506688039UL, 103
3083295UL, 793807083UL, 1120681149UL, 4105670165UL, 3999570340UL, 208302013
1UL, 1213356023UL, 3684882757UL, 3375797774UL, 3577986103UL, 2092046164UL,
2593847443UL, 1826450612UL, 367828409UL, 3198272513UL, 1941316667UL, 943707
510UL, 907134807UL, 2020457947UL, 1462193665UL, 2964617539UL, 4216491663UL,
2625270800UL, 2395371467UL, 3691003028UL, 3659016793UL, 2381847054UL, 3513
105567UL, 3013019506UL, 2731245927UL, }, {1680024716UL, 2112340059UL, 33874
75367UL, 2080916186UL, 1431532386UL, 3907378472UL, 2636491350UL, 2176128529
UL, 2236616671UL, 3736851460UL, 2604001339UL, 3893075234UL, 3495918635UL, 4
116370522UL, 1384310379UL, 3660102574UL, 2030233939UL, 2759207091UL, 493479
23UL, 97526506UL, 2566932710UL, 1566181275UL, 3127827248UL, 578401670UL, 14
99229308UL, 2581732444UL, 279715551UL, 809690877UL, 1438444015UL, 878935323
UL, 1495277039UL, 3417305339UL, 2858903785UL, 3074075088UL, 603749086UL, 23
70669734UL, 391683868UL, 3933465331UL, 2884128106UL, 1478317876UL, 18649883
35UL, 2925823809UL, 4133578805UL, 218104493UL, 368652174UL, 1998600344UL, 1
109346044UL, 1716435313UL, 415435111UL, 91393686UL, 2536620737UL, 144006857
3UL, 481874870UL, 142128108UL, 988825519UL, 2077118779UL, 2858045339UL, 406
8162251UL, 115593872UL, 1364244587UL, 3550167006UL, 3728768059UL, 177242368
5UL, 2504624145UL, 248732306UL, 1412607307UL, 4081166331UL, 154438218UL, 16
52901877UL, 3932533490UL, 3142799969UL, 3154073676UL, 3112018078UL, 2757873
595UL, 2364830126UL, 2855791484UL, 793851407UL, 507785167UL, 263713916UL, 4
060700051UL, 3291978358UL, 1584226715UL, 2546417990UL, 450747961UL, 2951067
700UL, 2706009093UL, 1788578194UL, 4030171132UL, 2610979903UL, 573420740UL,
4269115622UL, 2180305819UL, 2646894726UL, 716649335UL, 3875715683UL, 85342
8184UL, 2436760738UL, 4190071217UL, 2754423535UL, 540698101UL, 4082489821UL
, 741976046UL, 267559495UL, 1591532642UL, 2500610323UL, 3203248679UL, 14731
2102UL, 2772368222UL, 1412987047UL, 2295185573UL, 1932341300UL, 898396308UL
, 1837129999UL, 3113914292UL, 2613354524UL, 3141601915UL, 276087167UL, 1887
389351UL, 757801450UL, 3752353732UL, 2745818074UL, 1442953464UL, 3802648347
UL, 223728071UL, 2169947402UL, 1338125300UL, 3642174036UL, 2794462634UL, 23
26349851UL, 862746036UL, 3577092599UL, 627103363UL, 552173564UL, 4142604459
UL, 2310329406UL, 583522272UL, 189323282UL, 1217612313UL, 73550248UL, 24346
92829UL, 2757269706UL, 2392210091UL, 3032922600UL, 3573904125UL, 2897178037
UL, 2632631469UL, 3085332665UL, 3775619904UL, 2563291734UL, 1351375865UL, 4
043427793UL, 1803743084UL, 3112116579UL, 522940594UL, 2690374983UL, 2613871
529UL, 3810037031UL, 1765642390UL, 534554747UL, 1930852049UL, 2264349344UL,
1680024716UL, 2112340059UL, 3387475367UL, 2080916186UL, 75966494UL, 390737
8472UL, 2636491350UL, 2176128529UL, 2236616671UL, 2372987046UL, 2604001339U
L, 3893075234UL, 3495918635UL, 4116370522UL, 534929913UL, 3660102574UL, 203
0233939UL, 2759207091UL, 49347923UL, 987575186UL, 2566932710UL, 1566181275U
L, 3127827248UL, 578401670UL, 3731513754UL, 2581732444UL, 279715551UL, 8096
90877UL, 1438444015UL, 2185866850UL, 1495277039UL, 3417305339UL, 2858903785
UL, 3074075088UL, 4198538376UL, 2370669734UL, 391683868UL, 3933465331UL, 28
84128106UL, 1400216510UL, 1864988335UL, 2925823809UL, 4133578805UL, 2181044
93UL, 2798390374UL, 1998600344UL, 1109346044UL, 1716435313UL, 415435111UL,
1892535124UL, 2536620737UL, 1440068573UL, 481874870UL, 142128108UL, 3290827
40UL, 2077118779UL, 2858045339UL, 4068162251UL, 115593872UL, 2644000449UL,
3550167006UL, 3728768059UL, 1772423685UL, 2504624145UL, 2140118619UL, 14126
07307UL, 4081166331UL, 154438218UL, 1652901877UL, 3804911318UL, 3142799969U
L, 3154073676UL, 3112018078UL, 2757873595UL, 50297646UL, 2855791484UL, 7938
51407UL, 507785167UL, 263713916UL, 3324588195UL, 3291978358UL, 1584226715UL
, 2546417990UL, 450747961UL, 3455625012UL, 2706009093UL, 1788578194UL, 4030
171132UL, 2610979903UL, 3835380965UL, 4269115622UL, 2180305819UL, 264689472
6UL, 716649335UL, 2607142354UL, 853428184UL, 2436760738UL, 4190071217UL, 27
54423535UL, 456808691UL, 4082489821UL, 741976046UL, 267559495UL, 1591532642
UL, 2722205042UL, 3203248679UL, 147312102UL, 2772368222UL, 1412987047UL, 19
50543946UL, 1932341300UL, 898396308UL, 1837129999UL, 3113914292UL, 42861639
2UL, 3141601915UL, 276087167UL, 1887389351UL, 757801450UL, 963534966UL, 274
5818074UL, 1442953464UL, 3802648347UL, 223728071UL, 229039300UL, 1338125300
UL, 3642174036UL, 2794462634UL, 2326349851UL, 206115203UL, 3577092599UL, 62
7103363UL, 552173564UL, 4142604459UL, 1492461846UL, 583522272UL, 189323282U
L, 1217612313UL, 73550248UL, 3552211807UL, 2757269706UL, 2392210091UL, 3032
922600UL, 3573904125UL, 810640644UL, 2632631469UL, 3085332665UL, 3775619904
UL, 2563291734UL, 922608790UL, 4043427793UL, 1803743084UL, 3112116579UL, 52
2940594UL, 1785093944UL, 2613871529UL, 3810037031UL, 1765642390UL, 53455474
7UL, 3528050076UL, 2264349344UL, 1680024716UL, 2112340059UL, 3387475367UL,
3295682653UL, 75966494UL, 3907378472UL, 2636491350UL, 2176128529UL, 3574915
532UL, 2372987046UL, 2604001339UL, 3893075234UL, 3495918635UL, 1280296085UL
, 534929913UL, 3660102574UL, 2030233939UL, 2759207091UL, 299776535UL, 98757
5186UL, 2566932710UL, 1566181275UL, 3127827248UL, 3874691533UL, 3731513754U
L, 2581732444UL, 279715551UL, 809690877UL, 3100791084UL, 2185866850UL, 1495
277039UL, 3417305339UL, 2858903785UL, 1310351481UL, 4198538376UL, 237066973
4UL, 391683868UL, 3933465331UL, 2749085130UL, 1400216510UL, 1864988335UL, 2
925823809UL, 4133578805UL, 3352814594UL, 2798390374UL, 1998600344UL, 110934
6044UL, 1716435313UL, 1571752941UL, 1892535124UL, 2536620737UL, 1440068573U
L, 481874870UL, 2485033697UL, 329082740UL, 2077118779UL, 2858045339UL, 4068
162251UL, 3837440666UL, 2644000449UL, 3550167006UL, 3728768059UL, 177242368
5UL, 1176559812UL, 2140118619UL, 1412607307UL, 4081166331UL, 154438218UL, 2
902622972UL, 3804911318UL, 3142799969UL, 3154073676UL, 3112018078UL, 240339
1233UL, 50297646UL, 2855791484UL, 793851407UL, 507785167UL, 2351826747UL, 3
324588195UL, 3291978358UL, 1584226715UL, 2546417990UL, 746876926UL, 3455625
012UL, 2706009093UL, 1788578194UL, 4030171132UL, 3779307353UL, 3835380965UL
, 4269115622UL, 2180305819UL, 2646894726UL, 2602235234UL, 2607142354UL, 853
428184UL, 2436760738UL, 4190071217UL, 2066757692UL, 456808691UL, 4082489821
UL, 741976046UL, 267559495UL, 3001080633UL, 2722205042UL, 3203248679UL, 147
312102UL, 2772368222UL, 89950260UL, 1950543946UL, 1932341300UL, 898396308UL
, 1837129999UL, 947911286UL, 428616392UL, 3141601915UL, 276087167UL, 188738
9351UL, 2583987247UL, 963534966UL, 2745818074UL, 1442953464UL, 3802648347UL
, 4229124441UL, 229039300UL, 1338125300UL, 3642174036UL, 2794462634UL, 2472
155633UL, 206115203UL, 3577092599UL, 627103363UL, 552173564UL, 2586882739UL
, 1492461846UL, 583522272UL, 189323282UL, 1217612313UL, 3501549884UL, 35522
11807UL, 2757269706UL, 2392210091UL, 3032922600UL, 740675778UL, 810640644UL
, 2632631469UL, 3085332665UL, 3775619904UL, 3643289881UL, 922608790UL, 4043
427793UL, 1803743084UL, 3112116579UL, 2213337398UL, 1785093944UL, 261387152
9UL, 3810037031UL, 1765642390UL, 762472016UL, 3528050076UL, 2264349344UL, 1
680024716UL, 2112340059UL, 1372272974UL, 3295682653UL, 75966494UL, 39073784
72UL, 2636491350UL, 3117471955UL, 3574915532UL, 2372987046UL, 2604001339UL,
3893075234UL, 915576383UL, 1280296085UL, 534929913UL, 3660102574UL, 203023
3939UL, 346368350UL, 299776535UL, 987575186UL, 2566932710UL, 1566181275UL,
3535223896UL, 3874691533UL, 3731513754UL, 2581732444UL, 279715551UL, 245689
4951UL, 3100791084UL, 2185866850UL, 1495277039UL, 3417305339UL, 1618871086U
L, 1310351481UL, 4198538376UL, 2370669734UL, 391683868UL, 2009676005UL, 274
9085130UL, 1400216510UL, 1864988335UL, 2925823809UL, 58955107UL, 3352814594
UL, 2798390374UL, 1998600344UL, 1109346044UL, 3273979614UL, 1571752941UL, 1
892535124UL, 2536620737UL, 1440068573UL, 1174168447UL, 2485033697UL, 329082
740UL, 2077118779UL, 2858045339UL, 4062921629UL, 3837440666UL, 2644000449UL
, 3550167006UL, 3728768059UL, 2642133401UL, 1176559812UL, 2140118619UL, 141
2607307UL, 4081166331UL, 3124905304UL, 2902622972UL, 3804911318UL, 31427999
69UL, 3154073676UL, 1449454613UL, 2403391233UL, 50297646UL, 2855791484UL, 7
93851407UL, 3514201526UL, 2351826747UL, 3324588195UL, 3291978358UL, 1584226
715UL, 3636681672UL, 746876926UL, 3455625012UL, 2706009093UL, 1788578194UL,
3451519459UL, 3779307353UL, 3835380965UL, 4269115622UL, 2180305819UL, 3987
989524UL, 2602235234UL, 2607142354UL, 853428184UL, 2436760738UL, 2151617107
UL, 2066757692UL, 456808691UL, 4082489821UL, 741976046UL, 3590081269UL, 300
1080633UL, 2722205042UL, 3203248679UL, 147312102UL, 3432947806UL, 89950260U
L, 1950543946UL, 1932341300UL, 898396308UL, 3828432864UL, 947911286UL, 4286
16392UL, 3141601915UL, 276087167UL, 2517666433UL, 2583987247UL, 963534966UL
, 2745818074UL, 1442953464UL, 2223986807UL, 4229124441UL, 229039300UL, 1338
125300UL, 3642174036UL, 1053796945UL, 2472155633UL, 206115203UL, 3577092599
UL, 627103363UL, 1113276084UL, 2586882739UL, 1492461846UL, 583522272UL, 189
323282UL, 1490604990UL, 3501549884UL, 3552211807UL, 2757269706UL, 239221009
1UL, 3545407532UL, 740675778UL, 810640644UL, 2632631469UL, 3085332665UL, 75
5862267UL, 3643289881UL, 922608790UL, 4043427793UL, 1803743084UL, 195416663
0UL, 2213337398UL, 1785093944UL, 2613871529UL, 3810037031UL, 3042935707UL,
3162182177UL, 2791346436UL, 1901925289UL, 863100941UL, 3367519168UL, 197262
3238UL, 3664303070UL, 604922059UL, 3026817982UL, 1436412310UL, 4096180631UL
, 1597561857UL, 4206212303UL, 4127914332UL, 3228677359UL, 3985733659UL, 359
7290113UL, 4251197894UL, 3451370603UL, 609679338UL, 3360835257UL, 137223988
5UL, 638572328UL, 3806422284UL, 3974147336UL, 1804280837UL, 4209089291UL, 2
021797469UL, 3557188838UL, 409727186UL, 2114649178UL, 687702120UL, 25424459
92UL, 1235991799UL, 460479179UL, 2008348175UL, 887884478UL, 3942327811UL, 2
999928223UL, 4171339789UL, 2286339235UL, 1293442231UL, 1575942850UL, 761224
75UL, 1440527701UL, 2006558403UL, 1544148172UL, 895899367UL, 681826913UL, 4
094701935UL, 3995413790UL, 1027509154UL, 2264990896UL, 1938238113UL, 213430
250UL, 222469320UL, 609726517UL, 3581538106UL, 492802663UL, 120480843UL, 17
20004062UL, 1132674507UL, 911082758UL, 2909148131UL, 566658805UL, 396411444
5UL, 3483602509UL, 1793438750UL, 165562604UL, 3641830063UL, 2394205521UL, 3
404874822UL, 1672998096UL, 916151953UL, 1141264477UL, 3171661340UL, 3803396
219UL, 3018337382UL, 1863902683UL, 2474641928UL, 3250365071UL, 3897886220UL
, 1219701051UL, 51332576UL, 1358614881UL, 1707407492UL, 3670647816UL, 92335
7625UL, 343687395UL, 3991339686UL, 3913575403UL, 1267727936UL, 4001357856UL
, 3820224848UL, 2942896724UL, 3505936742UL, 1403285299UL, 1992762049UL, 567
748449UL, 2202721585UL, 2781324216UL, 1724850068UL, 2408314541UL, 307397581
3UL, 3992810029UL, 2475242354UL, 540562053UL, 2185198943UL, 3759352041UL, 3
373885614UL, 1132999410UL, 1097554565UL, 4089342358UL, 3239542922UL, 245174
8646UL, 407290679UL, 3188103200UL, 1708016248UL, 26848241UL, 2796711130UL,
3090711568UL, 4068389322UL, 3420916085UL, 3137567033UL, 2877819818UL, 22133
454UL, 4629160UL, 3703695249UL, 1920151708UL, 1175452162UL, 130015299UL, 33
31834713UL, 1099225384UL, 689254331UL, 1851083761UL, 2654970209UL, 32592979
36UL, 3742819314UL, 3524284766UL, 2291819083UL, 3494031861UL, 16242889UL, 3
545082774UL, 1997878108UL, 777447699UL, 4244916543UL, 3508640253UL, 3782278
393UL, 2107258964UL, 2139074576UL, 1383217899UL, 2337934322UL, 3181899620UL
, 1285955765UL, 2989610020UL, 3326862146UL, 1168587380UL, 801203532UL, 3020
809957UL, }, {3810471203UL, 1017064446UL, 1595207573UL, 441087832UL, 332674
6890UL, 3294064431UL, 167972517UL, 3625210015UL, 1011845006UL, 2980240819UL
, 1778354660UL, 3041730987UL, 1598611350UL, 2015169745UL, 2321724978UL, 339
0812967UL, 2432904511UL, 113261909UL, 3957193232UL, 3806115908UL, 296582892
9UL, 2035392295UL, 3500116619UL, 2881232416UL, 1672212265UL, 1607201428UL,
425148945UL, 1262591961UL, 2221781268UL, 4215047456UL, 2148245850UL, 278748
8981UL, 1077262192UL, 2085467561UL, 3053954888UL, 3584435116UL, 3013084787U
L, 287099941UL, 1290407232UL, 4078552287UL, 2658945475UL, 4251530898UL, 240
3086478UL, 2884923598UL, 3545110453UL, 4105390090UL, 343200643UL, 318988882
1UL, 4086304363UL, 3466483195UL, 259435633UL, 2846377387UL, 497258846UL, 27
2775541UL, 985737911UL, 2957688879UL, 2180784344UL, 3434619542UL, 364338483
8UL, 2228652440UL, 3107480718UL, 2208729807UL, 596436263UL, 3255120711UL, 3
248886970UL, 519242965UL, 602979109UL, 1619614UL, 1391563565UL, 56262588UL,
1584463910UL, 1849038201UL, 728022295UL, 848624947UL, 1813827408UL, 428214
945UL, 1246345586UL, 4213351865UL, 168985863UL, 456608054UL, 4277869380UL,
3886828599UL, 2264054549UL, 3110967170UL, 3138175314UL, 2649164828UL, 33693
78320UL, 3648350039UL, 3524848759UL, 1468470706UL, 3558859222UL, 2669673235
UL, 831851874UL, 4285651092UL, 4224147373UL, 1088456706UL, 231954609UL, 311
8005852UL, 225508069UL, 883105389UL, 856371341UL, 2001356578UL, 639336670UL
, 2363501707UL, 3622399552UL, 4024065226UL, 1093546838UL, 4263608561UL, 185
2072422UL, 425195042UL, 2441102396UL, 296426333UL, 384641750UL, 3559334435U
L, 1757327033UL, 1016016207UL, 3595686646UL, 24777793UL, 623926105UL, 21691
95923UL, 1779396793UL, 646997837UL, 1459728476UL, 2644865980UL, 1994581089U
L, 3956278544UL, 919592580UL, 2153558858UL, 2029633394UL, 3837501009UL, 401
6560170UL, 484838096UL, 3652199054UL, 1971790561UL, 605295089UL, 637470291U
L, 278970544UL, 3574824693UL, 295866521UL, 1755035156UL, 2542341803UL, 1588
716357UL, 1502596918UL, 4124554133UL, 3547049843UL, 1768033045UL, 153173463
0UL, 101448323UL, 3233017580UL, 1793222944UL, 3187853500UL, 186000900UL, 80
3444571UL, 2820254958UL, 2009384608UL, 2384668855UL, 2222812920UL, 63360866
5UL, 2028480056UL, 1258028235UL, 545095949UL, 3810471203UL, 1017064446UL, 1
595207573UL, 441087832UL, 899068662UL, 3294064431UL, 167972517UL, 362521001
5UL, 1011845006UL, 3951305793UL, 1778354660UL, 3041730987UL, 1598611350UL,
2015169745UL, 1885149424UL, 3390812967UL, 2432904511UL, 113261909UL, 395719
3232UL, 3953443155UL, 2965828929UL, 2035392295UL, 3500116619UL, 2881232416U
L, 329153573UL, 1607201428UL, 425148945UL, 1262591961UL, 2221781268UL, 7802
8761UL, 2148245850UL, 2787488981UL, 1077262192UL, 2085467561UL, 647235899UL
, 3584435116UL, 3013084787UL, 287099941UL, 1290407232UL, 1467385694UL, 2658
945475UL, 4251530898UL, 2403086478UL, 2884923598UL, 3489351040UL, 410539009
0UL, 343200643UL, 3189888821UL, 4086304363UL, 3521512280UL, 259435633UL, 28
46377387UL, 497258846UL, 272775541UL, 1367093111UL, 2957688879UL, 218078434
4UL, 3434619542UL, 3643384838UL, 411877686UL, 3107480718UL, 2208729807UL, 5
96436263UL, 3255120711UL, 584605030UL, 519242965UL, 602979109UL, 1619614UL,
1391563565UL, 3902518209UL, 1584463910UL, 1849038201UL, 728022295UL, 84862
4947UL, 1932969318UL, 428214945UL, 1246345586UL, 4213351865UL, 168985863UL,
2770345237UL, 4277869380UL, 3886828599UL, 2264054549UL, 3110967170UL, 2953
581033UL, 2649164828UL, 3369378320UL, 3648350039UL, 3524848759UL, 238035397
7UL, 3558859222UL, 2669673235UL, 831851874UL, 4285651092UL, 1214052447UL, 1
088456706UL, 231954609UL, 3118005852UL, 225508069UL, 1766983646UL, 85637134
1UL, 2001356578UL, 639336670UL, 2363501707UL, 1782816591UL, 4024065226UL, 1
093546838UL, 4263608561UL, 1852072422UL, 1149716600UL, 2441102396UL, 296426
333UL, 384641750UL, 3559334435UL, 2391309970UL, 1016016207UL, 3595686646UL,
24777793UL, 623926105UL, 362098678UL, 1779396793UL, 646997837UL, 145972847
6UL, 2644865980UL, 3238673748UL, 3956278544UL, 919592580UL, 2153558858UL, 2
029633394UL, 115778559UL, 4016560170UL, 484838096UL, 3652199054UL, 19717905
61UL, 737357475UL, 637470291UL, 278970544UL, 3574824693UL, 295866521UL, 398
9745853UL, 2542341803UL, 1588716357UL, 1502596918UL, 4124554133UL, 30168497
44UL, 1768033045UL, 1531734630UL, 101448323UL, 3233017580UL, 4157527581UL,
3187853500UL, 186000900UL, 803444571UL, 2820254958UL, 1980528062UL, 2384668
855UL, 2222812920UL, 633608665UL, 2028480056UL, 3166710281UL, 545095949UL,
3810471203UL, 1017064446UL, 1595207573UL, 693962828UL, 899068662UL, 3294064
431UL, 167972517UL, 3625210015UL, 1486040398UL, 3951305793UL, 1778354660UL,
3041730987UL, 1598611350UL, 2859363132UL, 1885149424UL, 3390812967UL, 2432
904511UL, 113261909UL, 664880478UL, 3953443155UL, 2965828929UL, 2035392295U
L, 3500116619UL, 558081801UL, 329153573UL, 1607201428UL, 425148945UL, 12625
91961UL, 3716247699UL, 78028761UL, 2148245850UL, 2787488981UL, 1077262192UL
, 4206362947UL, 647235899UL, 3584435116UL, 3013084787UL, 287099941UL, 25367
81098UL, 1467385694UL, 2658945475UL, 4251530898UL, 2403086478UL, 3075072413
UL, 3489351040UL, 4105390090UL, 343200643UL, 3189888821UL, 2540485172UL, 35
21512280UL, 259435633UL, 2846377387UL, 497258846UL, 2442427327UL, 136709311
1UL, 2957688879UL, 2180784344UL, 3434619542UL, 1593967423UL, 411877686UL, 3
107480718UL, 2208729807UL, 596436263UL, 1048686529UL, 584605030UL, 51924296
5UL, 602979109UL, 1619614UL, 2072745381UL, 3902518209UL, 1584463910UL, 1849
038201UL, 728022295UL, 846033949UL, 1932969318UL, 428214945UL, 1246345586UL
, 4213351865UL, 1066373275UL, 2770345237UL, 4277869380UL, 3886828599UL, 226
4054549UL, 1877859690UL, 2953581033UL, 2649164828UL, 3369378320UL, 36483500
39UL, 2537763389UL, 2380353977UL, 3558859222UL, 2669673235UL, 831851874UL,
522748140UL, 1214052447UL, 1088456706UL, 231954609UL, 3118005852UL, 1381269
315UL, 1766983646UL, 856371341UL, 2001356578UL, 639336670UL, 667275675UL, 1
782816591UL, 4024065226UL, 1093546838UL, 4263608561UL, 2057337961UL, 114971
6600UL, 2441102396UL, 296426333UL, 384641750UL, 340523210UL, 2391309970UL,
1016016207UL, 3595686646UL, 24777793UL, 3094832341UL, 362098678UL, 17793967
93UL, 646997837UL, 1459728476UL, 1169681568UL, 3238673748UL, 3956278544UL,
919592580UL, 2153558858UL, 388335108UL, 115778559UL, 4016560170UL, 48483809
6UL, 3652199054UL, 1764858181UL, 737357475UL, 637470291UL, 278970544UL, 357
4824693UL, 3671458900UL, 3989745853UL, 2542341803UL, 1588716357UL, 15025969
18UL, 2102871406UL, 3016849744UL, 1768033045UL, 1531734630UL, 101448323UL,
3964942332UL, 4157527581UL, 3187853500UL, 186000900UL, 803444571UL, 3425652
083UL, 1980528062UL, 2384668855UL, 2222812920UL, 633608665UL, 3035373876UL,
3166710281UL, 545095949UL, 3810471203UL, 1017064446UL, 669282349UL, 693962
828UL, 899068662UL, 3294064431UL, 167972517UL, 2007256988UL, 1486040398UL,
3951305793UL, 1778354660UL, 3041730987UL, 2827768941UL, 2859363132UL, 18851
49424UL, 3390812967UL, 2432904511UL, 3700915653UL, 664880478UL, 3953443155U
L, 2965828929UL, 2035392295UL, 1461208330UL, 558081801UL, 329153573UL, 1607
201428UL, 425148945UL, 1700881129UL, 3716247699UL, 78028761UL, 2148245850UL
, 2787488981UL, 2706775080UL, 4206362947UL, 647235899UL, 3584435116UL, 3013
084787UL, 2958545221UL, 2536781098UL, 1467385694UL, 2658945475UL, 425153089
8UL, 2241012567UL, 3075072413UL, 3489351040UL, 4105390090UL, 343200643UL, 4
90164649UL, 2540485172UL, 3521512280UL, 259435633UL, 2846377387UL, 40736118
31UL, 2442427327UL, 1367093111UL, 2957688879UL, 2180784344UL, 1835510773UL,
1593967423UL, 411877686UL, 3107480718UL, 2208729807UL, 3306732468UL, 10486
86529UL, 584605030UL, 519242965UL, 602979109UL, 2978864605UL, 2072745381UL,
3902518209UL, 1584463910UL, 1849038201UL, 3284115169UL, 846033949UL, 19329
69318UL, 428214945UL, 1246345586UL, 194166002UL, 1066373275UL, 2770345237UL
, 4277869380UL, 3886828599UL, 1874087886UL, 1877859690UL, 2953581033UL, 264
9164828UL, 3369378320UL, 4145454028UL, 2537763389UL, 2380353977UL, 35588592
22UL, 2669673235UL, 739345884UL, 522748140UL, 1214052447UL, 1088456706UL, 2
31954609UL, 3605603781UL, 1381269315UL, 1766983646UL, 856371341UL, 20013565
78UL, 2049940324UL, 667275675UL, 1782816591UL, 4024065226UL, 1093546838UL,
152524382UL, 2057337961UL, 1149716600UL, 2441102396UL, 296426333UL, 3195130
788UL, 340523210UL, 2391309970UL, 1016016207UL, 3595686646UL, 180492441UL,
3094832341UL, 362098678UL, 1779396793UL, 646997837UL, 2458167607UL, 1169681
568UL, 3238673748UL, 3956278544UL, 919592580UL, 3421005218UL, 388335108UL,
115778559UL, 4016560170UL, 484838096UL, 2649676374UL, 1764858181UL, 7373574
75UL, 637470291UL, 278970544UL, 2236401278UL, 3671458900UL, 3989745853UL, 2
542341803UL, 1588716357UL, 1241570134UL, 2102871406UL, 3016849744UL, 176803
3045UL, 1531734630UL, 1765654724UL, 3964942332UL, 4157527581UL, 3187853500U
L, 186000900UL, 2189716659UL, 3425652083UL, 1980528062UL, 2384668855UL, 222
2812920UL, 3955466207UL, 2426547616UL, 3846752458UL, 3015538636UL, 23425933
65UL, 3613176865UL, 3484860981UL, 4278370194UL, 1979143878UL, 1159739458UL,
3714038404UL, 396530346UL, 3276617756UL, 3293940597UL, 4050183149UL, 14185
71985UL, 402563753UL, 2702853013UL, 2289900621UL, 2267058511UL, 3482161995U
L, 3375026019UL, 1988640267UL, 3674438074UL, 4124612310UL, 1057883705UL, 43
4730475UL, 3210959778UL, 4102029739UL, 2140938750UL, 3176753074UL, 23569715
12UL, 3969685288UL, 1556275580UL, 2648433428UL, 3959375381UL, 478841344UL,
1496991528UL, 3309714981UL, 569990368UL, 3660587501UL, 2550379574UL, 117751
9842UL, 2652707373UL, 543943404UL, 1912551128UL, 2278132032UL, 1484596780UL
, 3570913985UL, 2982401320UL, 1413776035UL, 3177275459UL, 3036211597UL, 109
1740466UL, 3448424311UL, 1445187645UL, 3205024875UL, 3135795254UL, 82373872
9UL, 3742134467UL, 4066657438UL, 1226311678UL, 2403605393UL, 537573634UL, 3
457409768UL, 1940233423UL, 1761431281UL, 1129427309UL, 2443661283UL, 320081
4257UL, 4094866249UL, 2666869754UL, 604785127UL, 2213464116UL, 3002782918UL
, 468024929UL, 2490681314UL, 3666681384UL, 1583346053UL, 3049668798UL, 3592
153237UL, 2573082448UL, 3082970021UL, 1461796708UL, 832526980UL, 3728763274
UL, 355291229UL, 4029588456UL, 832358279UL, 2125298737UL, 3681181038UL, 324
5535160UL, 1333342738UL, 1868897492UL, 446790068UL, 1278093154UL, 209011861
5UL, 4158925515UL, 4062165914UL, 822726809UL, 1154960183UL, 286518382UL, 11
70424276UL, 2554691236UL, 3674133415UL, 2765714969UL, 2330865375UL, 1908307
334UL, 3537287082UL, 410252600UL, 3977128218UL, 424210327UL, 2919071615UL,
2715518134UL, 64568844UL, 480972649UL, 2488797168UL, 1302817038UL, 22139952
65UL, 4229997295UL, 2200797852UL, 109368057UL, 3033807022UL, 1907400078UL,
645977948UL, 1410909090UL, 3700787906UL, 3375062371UL, 629087832UL, 1344281
719UL, 4249981139UL, 3457543297UL, 1218556849UL, 864222854UL, 1458445945UL,
914545469UL, 3451164212UL, 1088025757UL, 1129933985UL, 953788883UL, 240617
2924UL, 170364546UL, 3505490646UL, 1027553899UL, 2864067776UL, 436854871UL,
1342782209UL, 761167471UL, 2660173631UL, 4159507498UL, 4172028400UL, 24422
54644UL, 2110123720UL, 2315991253UL, 873066601UL, 1725470559UL, 3831299052U
L, 678672031UL, 1585431329UL, 3495750550UL, }, {1998393432UL, 2665389278UL,
3989307699UL, 3267631636UL, 3861682977UL, 3243522970UL, 1243992413UL, 2200
497260UL, 3821883021UL, 4187123083UL, 3451270040UL, 3044132745UL, 210128724
9UL, 2340839784UL, 227040990UL, 1724350416UL, 3228881240UL, 3123386528UL, 4
279362126UL, 3098224464UL, 2635534069UL, 3622906431UL, 206207480UL, 1894245
533UL, 2152374527UL, 1011223653UL, 7271757UL, 2972858087UL, 207942127UL, 33
55362797UL, 2593296740UL, 174093751UL, 3713822176UL, 4212355586UL, 33356052
24UL, 1171716408UL, 2867257989UL, 1522213957UL, 2016192462UL, 4229688395UL,
2174928148UL, 1468226225UL, 3938290338UL, 493240317UL, 3229423344UL, 25854
75729UL, 3112454413UL, 1881171707UL, 2555908056UL, 1997546352UL, 380428329U
L, 3341885423UL, 3307510279UL, 3519476676UL, 3613100811UL, 2555826262UL, 10
9341943UL, 2382715395UL, 3883409616UL, 1593551879UL, 2163678014UL, 33797831
37UL, 2810374300UL, 1516064864UL, 561144874UL, 316017838UL, 1899237567UL, 7
0857401UL, 3435185465UL, 4234661323UL, 2580352177UL, 32879620UL, 4171670150
UL, 1986234067UL, 3589478191UL, 2073132526UL, 2603712175UL, 377997975UL, 24
74419397UL, 3110698341UL, 812664089UL, 1778922726UL, 1686111212UL, 97278413
8UL, 3936486236UL, 2711468739UL, 423435866UL, 1661961159UL, 802312780UL, 18
68728136UL, 1760295704UL, 3357409828UL, 215039860UL, 683184627UL, 401911106
4UL, 3609261689UL, 2167554309UL, 1831085281UL, 3389357802UL, 4193421575UL,
628277197UL, 2900207619UL, 993609502UL, 3429627083UL, 2636466084UL, 3652352
199UL, 1780133580UL, 1670387713UL, 4086070210UL, 4004540729UL, 783029246UL,
2165667566UL, 1739001057UL, 377639972UL, 1102689625UL, 1945278055UL, 39411
85940UL, 3685368326UL, 1881761572UL, 2201338934UL, 801752UL, 2729497735UL,
492844690UL, 2998826141UL, 3844964457UL, 3679088359UL, 2196391660UL, 422226
9404UL, 357321611UL, 3727170055UL, 1819614072UL, 2348798457UL, 4294366646UL
, 1952884323UL, 3574345216UL, 2040734807UL, 232392443UL, 4183498179UL, 2614
866055UL, 112120292UL, 3624018350UL, 3340709877UL, 3097507723UL, 1268833488
UL, 3570501956UL, 3338260086UL, 293812421UL, 3683058169UL, 1147960351UL, 28
3731890UL, 2171233479UL, 1830154455UL, 4036602681UL, 1996981699UL, 13280383
4UL, 40256165UL, 2158110401UL, 3575159090UL, 3196553513UL, 3559872992UL, 34
02884675UL, 1998393432UL, 2665389278UL, 3989307699UL, 3267631636UL, 3617519
767UL, 3243522970UL, 1243992413UL, 2200497260UL, 3821883021UL, 3715729085UL
, 3451270040UL, 3044132745UL, 2101287249UL, 2340839784UL, 3173635549UL, 172
4350416UL, 3228881240UL, 3123386528UL, 4279362126UL, 2287520039UL, 26355340
69UL, 3622906431UL, 206207480UL, 1894245533UL, 96723416UL, 1011223653UL, 72
71757UL, 2972858087UL, 207942127UL, 1668335352UL, 2593296740UL, 174093751UL
, 3713822176UL, 4212355586UL, 49226793UL, 1171716408UL, 2867257989UL, 15222
13957UL, 2016192462UL, 118712412UL, 2174928148UL, 1468226225UL, 3938290338U
L, 493240317UL, 3788174304UL, 2585475729UL, 3112454413UL, 1881171707UL, 255
5908056UL, 3351139844UL, 380428329UL, 3341885423UL, 3307510279UL, 351947667
6UL, 1368994724UL, 2555826262UL, 109341943UL, 2382715395UL, 3883409616UL, 1
561509458UL, 2163678014UL, 3379783137UL, 2810374300UL, 1516064864UL, 231325
2274UL, 316017838UL, 1899237567UL, 70857401UL, 3435185465UL, 2585770746UL,
2580352177UL, 32879620UL, 4171670150UL, 1986234067UL, 3317983509UL, 2073132
526UL, 2603712175UL, 377997975UL, 2474419397UL, 908728599UL, 812664089UL, 1
778922726UL, 1686111212UL, 972784138UL, 1992540005UL, 2711468739UL, 4234358
66UL, 1661961159UL, 802312780UL, 907108769UL, 1760295704UL, 3357409828UL, 2
15039860UL, 683184627UL, 2806826652UL, 3609261689UL, 2167554309UL, 18310852
81UL, 3389357802UL, 2755692689UL, 628277197UL, 2900207619UL, 993609502UL, 3
429627083UL, 3605915742UL, 3652352199UL, 1780133580UL, 1670387713UL, 408607
0210UL, 3717326627UL, 783029246UL, 2165667566UL, 1739001057UL, 377639972UL,
2355216626UL, 1945278055UL, 3941185940UL, 3685368326UL, 1881761572UL, 4024
097818UL, 801752UL, 2729497735UL, 492844690UL, 2998826141UL, 2719601647UL,
3679088359UL, 2196391660UL, 4222269404UL, 357321611UL, 1319821972UL, 181961
4072UL, 2348798457UL, 4294366646UL, 1952884323UL, 3573866689UL, 2040734807U
L, 232392443UL, 4183498179UL, 2614866055UL, 440744432UL, 3624018350UL, 3340
709877UL, 3097507723UL, 1268833488UL, 224895395UL, 3338260086UL, 293812421U
L, 3683058169UL, 1147960351UL, 3433425235UL, 2171233479UL, 1830154455UL, 40
36602681UL, 1996981699UL, 2875889721UL, 40256165UL, 2158110401UL, 357515909
0UL, 3196553513UL, 1094082574UL, 3402884675UL, 1998393432UL, 2665389278UL,
3989307699UL, 4068940467UL, 3617519767UL, 3243522970UL, 1243992413UL, 22004
97260UL, 441678457UL, 3715729085UL, 3451270040UL, 3044132745UL, 2101287249U
L, 2181502237UL, 3173635549UL, 1724350416UL, 3228881240UL, 3123386528UL, 19
68352124UL, 2287520039UL, 2635534069UL, 3622906431UL, 206207480UL, 20650935
99UL, 96723416UL, 1011223653UL, 7271757UL, 2972858087UL, 1094044749UL, 1668
335352UL, 2593296740UL, 174093751UL, 3713822176UL, 2887397643UL, 49226793UL
, 1171716408UL, 2867257989UL, 1522213957UL, 984348433UL, 118712412UL, 21749
28148UL, 1468226225UL, 3938290338UL, 2279430036UL, 3788174304UL, 2585475729
UL, 3112454413UL, 1881171707UL, 4247636500UL, 3351139844UL, 380428329UL, 33
41885423UL, 3307510279UL, 2887754196UL, 1368994724UL, 2555826262UL, 1093419
43UL, 2382715395UL, 2836761616UL, 1561509458UL, 2163678014UL, 3379783137UL,
2810374300UL, 1635278016UL, 2313252274UL, 316017838UL, 1899237567UL, 70857
401UL, 3481535811UL, 2585770746UL, 2580352177UL, 32879620UL, 4171670150UL,
2248003250UL, 3317983509UL, 2073132526UL, 2603712175UL, 377997975UL, 328616
2818UL, 908728599UL, 812664089UL, 1778922726UL, 1686111212UL, 4024815755UL,
1992540005UL, 2711468739UL, 423435866UL, 1661961159UL, 2257259057UL, 90710
8769UL, 1760295704UL, 3357409828UL, 215039860UL, 3917391198UL, 2806826652UL
, 3609261689UL, 2167554309UL, 1831085281UL, 4238043113UL, 2755692689UL, 628
277197UL, 2900207619UL, 993609502UL, 2036092353UL, 3605915742UL, 3652352199
UL, 1780133580UL, 1670387713UL, 118446953UL, 3717326627UL, 783029246UL, 216
5667566UL, 1739001057UL, 203160626UL, 2355216626UL, 1945278055UL, 394118594
0UL, 3685368326UL, 546361979UL, 4024097818UL, 801752UL, 2729497735UL, 49284
4690UL, 1023017124UL, 2719601647UL, 3679088359UL, 2196391660UL, 4222269404U
L, 621859651UL, 1319821972UL, 1819614072UL, 2348798457UL, 4294366646UL, 111
4888560UL, 3573866689UL, 2040734807UL, 232392443UL, 4183498179UL, 395950460
9UL, 440744432UL, 3624018350UL, 3340709877UL, 3097507723UL, 3613295037UL, 2
24895395UL, 3338260086UL, 293812421UL, 3683058169UL, 1655305863UL, 34334252
35UL, 2171233479UL, 1830154455UL, 4036602681UL, 3731384097UL, 2875889721UL,
40256165UL, 2158110401UL, 3575159090UL, 1847744924UL, 1094082574UL, 340288
4675UL, 1998393432UL, 2665389278UL, 3781866777UL, 4068940467UL, 3617519767U
L, 3243522970UL, 1243992413UL, 2723708256UL, 441678457UL, 3715729085UL, 345
1270040UL, 3044132745UL, 4013832842UL, 2181502237UL, 3173635549UL, 17243504
16UL, 3228881240UL, 2092292494UL, 1968352124UL, 2287520039UL, 2635534069UL,
3622906431UL, 3186333458UL, 2065093599UL, 96723416UL, 1011223653UL, 727175
7UL, 649658033UL, 1094044749UL, 1668335352UL, 2593296740UL, 174093751UL, 41
59420309UL, 2887397643UL, 49226793UL, 1171716408UL, 2867257989UL, 259007795
3UL, 984348433UL, 118712412UL, 2174928148UL, 1468226225UL, 1065322711UL, 22
79430036UL, 3788174304UL, 2585475729UL, 3112454413UL, 3932517386UL, 4247636
500UL, 3351139844UL, 380428329UL, 3341885423UL, 1285273904UL, 2887754196UL,
1368994724UL, 2555826262UL, 109341943UL, 2318470582UL, 2836761616UL, 15615
09458UL, 2163678014UL, 3379783137UL, 674658583UL, 1635278016UL, 2313252274U
L, 316017838UL, 1899237567UL, 2192372173UL, 3481535811UL, 2585770746UL, 258
0352177UL, 32879620UL, 300323274UL, 2248003250UL, 3317983509UL, 2073132526U
L, 2603712175UL, 3086543917UL, 3286162818UL, 908728599UL, 812664089UL, 1778
922726UL, 2263290659UL, 4024815755UL, 1992540005UL, 2711468739UL, 423435866
UL, 819027349UL, 2257259057UL, 907108769UL, 1760295704UL, 3357409828UL, 114
2221093UL, 3917391198UL, 2806826652UL, 3609261689UL, 2167554309UL, 41081558
75UL, 4238043113UL, 2755692689UL, 628277197UL, 2900207619UL, 3041719497UL,
2036092353UL, 3605915742UL, 3652352199UL, 1780133580UL, 2397410862UL, 11844
6953UL, 3717326627UL, 783029246UL, 2165667566UL, 2721690354UL, 203160626UL,
2355216626UL, 1945278055UL, 3941185940UL, 2768842108UL, 546361979UL, 40240
97818UL, 801752UL, 2729497735UL, 4045063232UL, 1023017124UL, 2719601647UL,
3679088359UL, 2196391660UL, 2666107451UL, 621859651UL, 1319821972UL, 181961
4072UL, 2348798457UL, 3555102623UL, 1114888560UL, 3573866689UL, 2040734807U
L, 232392443UL, 3359040541UL, 3959504609UL, 440744432UL, 3624018350UL, 3340
709877UL, 1477919696UL, 3613295037UL, 224895395UL, 3338260086UL, 293812421U
L, 4210187101UL, 1655305863UL, 3433425235UL, 2171233479UL, 1830154455UL, 41
50241150UL, 3731384097UL, 2875889721UL, 40256165UL, 2158110401UL, 335024668
7UL, 455561037UL, 2250400255UL, 3192153445UL, 3258870230UL, 1500391873UL, 4
142878334UL, 1155955691UL, 1483275844UL, 4189436981UL, 323745948UL, 1976017
426UL, 2804626790UL, 2717553615UL, 2315409034UL, 954508235UL, 3845175920UL,
3999878682UL, 1247696432UL, 1743319509UL, 2998248398UL, 3694350012UL, 4072
006361UL, 191306987UL, 2816321878UL, 1324077734UL, 1083060006UL, 3406855480
UL, 1619622379UL, 2160350UL, 3302238190UL, 3368021261UL, 3685228564UL, 3863
934685UL, 771728612UL, 854205233UL, 2304696695UL, 421449207UL, 1265752117UL
, 3852292419UL, 305345788UL, 1540622105UL, 1904883477UL, 833469256UL, 13440
6680UL, 3012455058UL, 4035477953UL, 2925192459UL, 1559200592UL, 3851612860U
L, 718484562UL, 1377960276UL, 1586892849UL, 1361298269UL, 3417917896UL, 128
1324499UL, 1012538763UL, 1350578667UL, 3946475598UL, 2982283954UL, 35487928
04UL, 284542749UL, 1194648577UL, 3087899716UL, 3966595444UL, 2088330116UL,
3641652062UL, 327128507UL, 593906557UL, 1092448919UL, 2459189516UL, 4053392
241UL, 3356198248UL, 2352376508UL, 470648997UL, 1017041256UL, 3234172340UL,
3928191489UL, 3266226858UL, 4219289150UL, 1229098319UL, 4275351308UL, 2720
777751UL, 3566728718UL, 638322822UL, 2369792461UL, 2869492261UL, 3120083828
UL, 1890399556UL, 3309991008UL, 3785452464UL, 4128660314UL, 3726791982UL, 1
67177896UL, 461294981UL, 3988638998UL, 2937794823UL, 3981029822UL, 11116814
02UL, 2015965721UL, 7261806UL, 2669786265UL, 1083582734UL, 3270228881UL, 38
92235938UL, 2695872715UL, 4246051290UL, 3214293333UL, 343604199UL, 32156048
88UL, 661024127UL, 2931754053UL, 3787840039UL, 2053363765UL, 363432336UL, 1
12334132UL, 2871797223UL, 138911320UL, 3981126938UL, 2027332192UL, 18047306
44UL, 590150270UL, 641538574UL, 6802174UL, 3551446076UL, 3908480472UL, 1004
531022UL, 2097228524UL, 1919074232UL, 154482247UL, 121437972UL, 1215661323U
L, 1178068273UL, 1097220699UL, 2823681422UL, 262636065UL, 2943371149UL, 176
8780720UL, 3866040605UL, 1855991583UL, 3988248086UL, 629223947UL, 338061233
0UL, 3552916762UL, 197596340UL, 573801686UL, 2049230598UL, 2910471867UL, 26
86314264UL, 1726228846UL, 3516983332UL, 726840185UL, 1241204222UL, 22375743
17UL, 70568042UL, 1932610099UL, 2221862221UL, 1510378092UL, 4050391637UL, 4
077539568UL, }, {3872117793UL, 803220151UL, 70843412UL, 1661103032UL, 19768
11457UL, 2186373604UL, 564259972UL, 1475436923UL, 2260980893UL, 4245534505U
L, 1075107552UL, 3692990573UL, 370098873UL, 4045905424UL, 2420395420UL, 233
2395402UL, 207483321UL, 622317750UL, 3004242500UL, 833623111UL, 3151161301U
L, 1629139881UL, 352228793UL, 2439953368UL, 3183333619UL, 2703537080UL, 321
8957129UL, 3164695888UL, 1741641842UL, 963394141UL, 4241612717UL, 103447678
4UL, 2035880432UL, 3977821313UL, 1543311495UL, 3010014356UL, 1638490901UL,
2364265378UL, 3420329129UL, 333361555UL, 1133565821UL, 1450937015UL, 616059
115UL, 3216393887UL, 3041978455UL, 3990855695UL, 1238628750UL, 512746184UL,
3256670217UL, 1616316512UL, 2791405051UL, 93474487UL, 2865892488UL, 190147
1398UL, 2930857966UL, 2178431077UL, 2325598341UL, 3189256113UL, 1302432091U
L, 808592927UL, 2945846737UL, 3487931071UL, 2018175258UL, 752981057UL, 1097
082589UL, 1307115286UL, 175147508UL, 3611190164UL, 850238914UL, 3318706185U
L, 199743319UL, 328621708UL, 3183670050UL, 3609998315UL, 4075306371UL, 3554
549067UL, 2119566187UL, 1498503842UL, 1261870696UL, 2216745780UL, 950288337
UL, 1117344941UL, 2150569143UL, 2899286760UL, 1594966374UL, 888858617UL, 35
840654UL, 2829539211UL, 2511395669UL, 3607190544UL, 3278412778UL, 224989590
7UL, 1320858068UL, 3576889788UL, 266766189UL, 1522426851UL, 1903494122UL, 1
928370573UL, 2628132591UL, 3322025904UL, 220280169UL, 433606853UL, 14289614
79UL, 986074592UL, 2128892987UL, 467697583UL, 1616913929UL, 325674890UL, 44
4442578UL, 649166208UL, 1689709565UL, 1493452467UL, 2222122038UL, 121114616
UL, 2134348225UL, 3512035688UL, 1283058921UL, 4230441398UL, 3701238559UL, 3
37534132UL, 1418548715UL, 1190006478UL, 500654385UL, 1766924757UL, 19446807
46UL, 940574010UL, 922744002UL, 186142284UL, 3131162902UL, 1693891092UL, 30
31823448UL, 2143051534UL, 1429025284UL, 1487843160UL, 3606456133UL, 2079235
652UL, 2447285474UL, 2669283767UL, 3232117829UL, 2490054343UL, 3225501736UL
, 2911340385UL, 382319031UL, 1516937595UL, 622543191UL, 1388990570UL, 17491
79860UL, 1924483707UL, 2593474505UL, 472539197UL, 122872799UL, 2586347240UL
, 880588515UL, 4046335279UL, 1712182607UL, 4270737941UL, 1336703451UL, 3390
078162UL, 382216945UL, 3733326081UL, 460422073UL, 3872117793UL, 803220151UL
, 70843412UL, 1661103032UL, 250339760UL, 2186373604UL, 564259972UL, 1475436
923UL, 2260980893UL, 657986735UL, 1075107552UL, 3692990573UL, 370098873UL,
4045905424UL, 3201950123UL, 2332395402UL, 207483321UL, 622317750UL, 3004242
500UL, 3732213278UL, 3151161301UL, 1629139881UL, 352228793UL, 2439953368UL,
3572618926UL, 2703537080UL, 3218957129UL, 3164695888UL, 1741641842UL, 6859
33373UL, 4241612717UL, 1034476784UL, 2035880432UL, 3977821313UL, 3855995181
UL, 3010014356UL, 1638490901UL, 2364265378UL, 3420329129UL, 2355603679UL, 1
133565821UL, 1450937015UL, 616059115UL, 3216393887UL, 1733804102UL, 3990855
695UL, 1238628750UL, 512746184UL, 3256670217UL, 2651059231UL, 2791405051UL,
93474487UL, 2865892488UL, 1901471398UL, 2113461797UL, 2178431077UL, 232559
8341UL, 3189256113UL, 1302432091UL, 2986990416UL, 2945846737UL, 3487931071U
L, 2018175258UL, 752981057UL, 2428033310UL, 1307115286UL, 175147508UL, 3611
190164UL, 850238914UL, 1033628405UL, 199743319UL, 328621708UL, 3183670050UL
, 3609998315UL, 4024297327UL, 3554549067UL, 2119566187UL, 1498503842UL, 126
1870696UL, 290361143UL, 950288337UL, 1117344941UL, 2150569143UL, 2899286760
UL, 168826051UL, 888858617UL, 35840654UL, 2829539211UL, 2511395669UL, 28908
82060UL, 3278412778UL, 2249895907UL, 1320858068UL, 3576889788UL, 1794920145
UL, 1522426851UL, 1903494122UL, 1928370573UL, 2628132591UL, 1251697758UL, 2
20280169UL, 433606853UL, 1428961479UL, 986074592UL, 2707115661UL, 467697583
UL, 1616913929UL, 325674890UL, 444442578UL, 122781510UL, 1689709565UL, 1493
452467UL, 2222122038UL, 121114616UL, 3425723636UL, 3512035688UL, 1283058921
UL, 4230441398UL, 3701238559UL, 1646155473UL, 1418548715UL, 1190006478UL, 5
00654385UL, 1766924757UL, 3920475367UL, 940574010UL, 922744002UL, 186142284
UL, 3131162902UL, 54639113UL, 3031823448UL, 2143051534UL, 1429025284UL, 148
7843160UL, 4152687885UL, 2079235652UL, 2447285474UL, 2669283767UL, 32321178
29UL, 1601035152UL, 3225501736UL, 2911340385UL, 382319031UL, 1516937595UL,
3508441679UL, 1388990570UL, 1749179860UL, 1924483707UL, 2593474505UL, 28354
03456UL, 122872799UL, 2586347240UL, 880588515UL, 4046335279UL, 2958058367UL
, 4270737941UL, 1336703451UL, 3390078162UL, 382216945UL, 450517882UL, 46042
2073UL, 3872117793UL, 803220151UL, 70843412UL, 2066343874UL, 250339760UL, 2
186373604UL, 564259972UL, 1475436923UL, 1683787449UL, 657986735UL, 10751075
52UL, 3692990573UL, 370098873UL, 2615082840UL, 3201950123UL, 2332395402UL,
207483321UL, 622317750UL, 2655424371UL, 3732213278UL, 3151161301UL, 1629139
881UL, 352228793UL, 3236724760UL, 3572618926UL, 2703537080UL, 3218957129UL,
3164695888UL, 9775065UL, 685933373UL, 4241612717UL, 1034476784UL, 20358804
32UL, 1621920075UL, 3855995181UL, 3010014356UL, 1638490901UL, 2364265378UL,
1509475888UL, 2355603679UL, 1133565821UL, 1450937015UL, 616059115UL, 36661
88236UL, 1733804102UL, 3990855695UL, 1238628750UL, 512746184UL, 3900473826U
L, 2651059231UL, 2791405051UL, 93474487UL, 2865892488UL, 222759186UL, 21134
61797UL, 2178431077UL, 2325598341UL, 3189256113UL, 2505499508UL, 2986990416
UL, 2945846737UL, 3487931071UL, 2018175258UL, 2766733928UL, 2428033310UL, 1
307115286UL, 175147508UL, 3611190164UL, 1909211603UL, 1033628405UL, 1997433
19UL, 328621708UL, 3183670050UL, 1680331218UL, 4024297327UL, 3554549067UL,
2119566187UL, 1498503842UL, 3516256046UL, 290361143UL, 950288337UL, 1117344
941UL, 2150569143UL, 3182619063UL, 168826051UL, 888858617UL, 35840654UL, 28
29539211UL, 645798943UL, 2890882060UL, 3278412778UL, 2249895907UL, 13208580
68UL, 1436708568UL, 1794920145UL, 1522426851UL, 1903494122UL, 1928370573UL,
3693049252UL, 1251697758UL, 220280169UL, 433606853UL, 1428961479UL, 372441
5861UL, 2707115661UL, 467697583UL, 1616913929UL, 325674890UL, 1448052253UL,
122781510UL, 1689709565UL, 1493452467UL, 2222122038UL, 2177448198UL, 34257
23636UL, 3512035688UL, 1283058921UL, 4230441398UL, 3050940272UL, 1646155473
UL, 1418548715UL, 1190006478UL, 500654385UL, 1106232UL, 3920475367UL, 94057
4010UL, 922744002UL, 186142284UL, 4144806511UL, 54639113UL, 3031823448UL, 2
143051534UL, 1429025284UL, 2067453848UL, 4152687885UL, 2079235652UL, 244728
5474UL, 2669283767UL, 428527087UL, 1601035152UL, 3225501736UL, 2911340385UL
, 382319031UL, 2565464472UL, 3508441679UL, 1388990570UL, 1749179860UL, 1924
483707UL, 1737735237UL, 2835403456UL, 122872799UL, 2586347240UL, 880588515U
L, 597822462UL, 2958058367UL, 4270737941UL, 1336703451UL, 3390078162UL, 253
2634475UL, 450517882UL, 460422073UL, 3872117793UL, 803220151UL, 801648827UL
, 2066343874UL, 250339760UL, 2186373604UL, 564259972UL, 3417948976UL, 16837
87449UL, 657986735UL, 1075107552UL, 3692990573UL, 2235306692UL, 2615082840U
L, 3201950123UL, 2332395402UL, 207483321UL, 699310933UL, 2655424371UL, 3732
213278UL, 3151161301UL, 1629139881UL, 1152704006UL, 3236724760UL, 357261892
6UL, 2703537080UL, 3218957129UL, 2726926336UL, 9775065UL, 685933373UL, 4241
612717UL, 1034476784UL, 2398119652UL, 1621920075UL, 3855995181UL, 301001435
6UL, 1638490901UL, 252854480UL, 1509475888UL, 2355603679UL, 1133565821UL, 1
450937015UL, 2655911639UL, 3666188236UL, 1733804102UL, 3990855695UL, 123862
8750UL, 1115900497UL, 3900473826UL, 2651059231UL, 2791405051UL, 93474487UL,
1862985957UL, 222759186UL, 2113461797UL, 2178431077UL, 2325598341UL, 41790
75132UL, 2505499508UL, 2986990416UL, 2945846737UL, 3487931071UL, 564667776U
L, 2766733928UL, 2428033310UL, 1307115286UL, 175147508UL, 1759077815UL, 190
9211603UL, 1033628405UL, 199743319UL, 328621708UL, 2552816198UL, 1680331218
UL, 4024297327UL, 3554549067UL, 2119566187UL, 2267805778UL, 3516256046UL, 2
90361143UL, 950288337UL, 1117344941UL, 2897506172UL, 3182619063UL, 16882605
1UL, 888858617UL, 35840654UL, 2035476068UL, 645798943UL, 2890882060UL, 3278
412778UL, 2249895907UL, 3278449102UL, 1436708568UL, 1794920145UL, 152242685
1UL, 1903494122UL, 1500763736UL, 3693049252UL, 1251697758UL, 220280169UL, 4
33606853UL, 3914497854UL, 3724415861UL, 2707115661UL, 467697583UL, 16169139
29UL, 918435305UL, 1448052253UL, 122781510UL, 1689709565UL, 1493452467UL, 6
09575172UL, 2177448198UL, 3425723636UL, 3512035688UL, 1283058921UL, 3661181
550UL, 3050940272UL, 1646155473UL, 1418548715UL, 1190006478UL, 1047301661UL
, 1106232UL, 3920475367UL, 940574010UL, 922744002UL, 2510633517UL, 41448065
11UL, 54639113UL, 3031823448UL, 2143051534UL, 3242814908UL, 2067453848UL, 4
152687885UL, 2079235652UL, 2447285474UL, 736638210UL, 428527087UL, 16010351
52UL, 3225501736UL, 2911340385UL, 1849570436UL, 2565464472UL, 3508441679UL,
1388990570UL, 1749179860UL, 84517579UL, 1737735237UL, 2835403456UL, 122872
799UL, 2586347240UL, 4002124614UL, 597822462UL, 2958058367UL, 4270737941UL,
1336703451UL, 3078170472UL, 1186434751UL, 700631413UL, 1497890797UL, 11953
47450UL, 2560167391UL, 1116697259UL, 1254138573UL, 747913260UL, 240954704UL
, 3107512667UL, 360584144UL, 3422778960UL, 3516528389UL, 3301260366UL, 1254
513537UL, 122269053UL, 1579582456UL, 873334104UL, 3918835024UL, 1731872444U
L, 1974410416UL, 1811172641UL, 4172523062UL, 4092675777UL, 4124987343UL, 19
36078756UL, 1757348689UL, 2694415512UL, 128641660UL, 1744777659UL, 31731167
29UL, 983733754UL, 1430789547UL, 701906842UL, 3367232568UL, 3266433501UL, 3
572590347UL, 1453272962UL, 2106553114UL, 993786201UL, 2149441250UL, 1295181
065UL, 2962229026UL, 3709052556UL, 3255608941UL, 3677730029UL, 483873127UL,
102227292UL, 2626265293UL, 2018984578UL, 2266388762UL, 1191709548UL, 21527
25916UL, 583672623UL, 2230473473UL, 1995194269UL, 1740347812UL, 2558095372U
L, 3070195183UL, 3023333227UL, 2497183195UL, 1908755188UL, 773027539UL, 364
6876518UL, 2272586839UL, 493318726UL, 2107067517UL, 2000805278UL, 253082963
6UL, 3183628745UL, 677565332UL, 1497629423UL, 82094920UL, 2214054433UL, 263
5367545UL, 470855467UL, 2184853389UL, 2942188934UL, 188335670UL, 3656661644
UL, 1883526235UL, 3990873975UL, 1490784356UL, 4047548172UL, 3149642641UL, 3
289988179UL, 2590918909UL, 2893039564UL, 2350687346UL, 4252624874UL, 153724
56UL, 1614496594UL, 2364847678UL, 2604511825UL, 422365460UL, 4195174772UL,
3266964836UL, 2008671995UL, 54038434UL, 781948549UL, 1276017666UL, 27563766
12UL, 2436825273UL, 1711863836UL, 3541493950UL, 3821378841UL, 1007557618UL,
345375815UL, 2081905201UL, 2227278118UL, 1185927141UL, 1082173792UL, 35673
61925UL, 1940465859UL, 541632942UL, 1830210248UL, 3757851982UL, 775883450UL
, 1666577465UL, 1004944607UL, 878440834UL, 2146344131UL, 4195798476UL, 3701
64841UL, 3649112729UL, 37066142UL, 2311278904UL, 1935745497UL, 2304799402UL
, 4107299626UL, 1348526232UL, 2473609635UL, 3284032699UL, 2374292786UL, 176
2329186UL, 857978496UL, 1039346432UL, 2621413355UL, 29961014UL, 3582263091U
L, 4268542513UL, 3890612190UL, 3096173646UL, 2026544230UL, 3856142618UL, 23
47115934UL, 319800326UL, 3255916105UL, 2430273059UL, 823505311UL, 874255188
UL, 1401925393UL, 4203707857UL, 4259159566UL, 2606881118UL, 1978288664UL, 1
447576038UL, 3860341401UL, 412510348UL, }, {4052471963UL, 683640040UL, 3043
876021UL, 3466644483UL, 4222418025UL, 3035140128UL, 1466027937UL, 18198088U
L, 3410320851UL, 3040963721UL, 488404231UL, 3157371815UL, 769336092UL, 3240
417718UL, 808582581UL, 2075839263UL, 835026995UL, 3123726486UL, 3284240985U
L, 1898453053UL, 3606056482UL, 512836002UL, 2715428547UL, 4182302879UL, 164
4882480UL, 3160187826UL, 390292489UL, 980889545UL, 2776206633UL, 2482799995
UL, 617042280UL, 3501667414UL, 689451808UL, 497018701UL, 238525753UL, 38901
63301UL, 896679896UL, 1544533015UL, 3412477225UL, 3116575138UL, 4250402651U
L, 3990990746UL, 819056741UL, 1459334146UL, 158377590UL, 3444755752UL, 8230
450UL, 1378706455UL, 684191332UL, 3217423797UL, 2842520097UL, 1631477948UL,
2591254230UL, 959644473UL, 1020694107UL, 1748401915UL, 3452514983UL, 38927
66171UL, 1227786994UL, 2086180800UL, 2394613217UL, 2091953150UL, 870094953U
L, 2306851481UL, 571550601UL, 488878212UL, 873197214UL, 2630100528UL, 20674
76907UL, 2162307009UL, 2026119728UL, 115875280UL, 2905867426UL, 248774881UL
, 3110900450UL, 2236032812UL, 1888510348UL, 708001855UL, 996960491UL, 35141
96956UL, 1407967546UL, 1826568876UL, 3659618284UL, 2614104317UL, 2230066308
UL, 1055135881UL, 2537437343UL, 1858044413UL, 2608594891UL, 2750681169UL, 3
241939420UL, 3966440877UL, 2375002886UL, 2417753441UL, 1405878685UL, 108113
3199UL, 1496940727UL, 382467042UL, 2745477587UL, 1209424459UL, 811187075UL,
1385604734UL, 2623887355UL, 3443875720UL, 394141555UL, 4142998949UL, 41954
14618UL, 1489846841UL, 2253433808UL, 1171450286UL, 84131191UL, 4387588UL, 2
641405140UL, 3525405389UL, 3273000909UL, 423660319UL, 2366546732UL, 3698878
607UL, 2161119729UL, 4263629085UL, 3029102089UL, 2692507376UL, 3266869596UL
, 1658012061UL, 1960169440UL, 1002311379UL, 3724446882UL, 2004188516UL, 999
513506UL, 2200093802UL, 4141037460UL, 351865836UL, 412875013UL, 1535823315U
L, 3880657632UL, 3109944987UL, 3207577548UL, 3462087941UL, 584875517UL, 263
5241084UL, 3834145971UL, 1693380373UL, 3524443732UL, 934775214UL, 196058884
7UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000UL, 226430296UL, 665
553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 3925248326UL, 285534637
6UL, 1205558328UL, 808835317UL, 3295908896UL, 4170076136UL, 2438272365UL, 4
052471963UL, 683640040UL, 3043876021UL, 3466644483UL, 1385549869UL, 3035140
128UL, 1466027937UL, 18198088UL, 3410320851UL, 2171386836UL, 488404231UL, 3
157371815UL, 769336092UL, 3240417718UL, 2921774554UL, 2075839263UL, 8350269
95UL, 3123726486UL, 3284240985UL, 72352110UL, 3606056482UL, 512836002UL, 27
15428547UL, 4182302879UL, 3869483469UL, 3160187826UL, 390292489UL, 98088954
5UL, 2776206633UL, 1385691983UL, 617042280UL, 3501667414UL, 689451808UL, 49
7018701UL, 2600411809UL, 3890163301UL, 896679896UL, 1544533015UL, 341247722
5UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL, 1459334146UL, 19
9003993UL, 3444755752UL, 8230450UL, 1378706455UL, 684191332UL, 1750733272UL
, 2842520097UL, 1631477948UL, 2591254230UL, 959644473UL, 2113375576UL, 1748
401915UL, 3452514983UL, 3892766171UL, 1227786994UL, 275473920UL, 2394613217
UL, 2091953150UL, 870094953UL, 2306851481UL, 897057645UL, 488878212UL, 8731
97214UL, 2630100528UL, 2067476907UL, 944114068UL, 2026119728UL, 115875280UL
, 2905867426UL, 248774881UL, 989201307UL, 2236032812UL, 1888510348UL, 70800
1855UL, 996960491UL, 2121706374UL, 1407967546UL, 1826568876UL, 3659618284UL
, 2614104317UL, 2931815032UL, 1055135881UL, 2537437343UL, 1858044413UL, 260
8594891UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002886UL, 24177534
41UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL, 2745477587UL,
81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 3443875720UL, 21006298
79UL, 4142998949UL, 4195414618UL, 1489846841UL, 2253433808UL, 337182869UL,
84131191UL, 4387588UL, 2641405140UL, 3525405389UL, 661876463UL, 423660319UL
, 2366546732UL, 3698878607UL, 2161119729UL, 309510684UL, 3029102089UL, 2692
507376UL, 3266869596UL, 1658012061UL, 11119541UL, 1002311379UL, 3724446882U
L, 2004188516UL, 999513506UL, 3486722046UL, 4141037460UL, 351865836UL, 4128
75013UL, 1535823315UL, 2818130700UL, 3109944987UL, 3207577548UL, 3462087941
UL, 584875517UL, 322875622UL, 3834145971UL, 1693380373UL, 3524443732UL, 934
775214UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016UL, 1120582000U
L, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 3325420136UL, 553
869152UL, 2855346376UL, 1205558328UL, 808835317UL, 3295908896UL, 470585896U
L, 2438272365UL, 4052471963UL, 683640040UL, 3043876021UL, 1588419572UL, 138
5549869UL, 3035140128UL, 1466027937UL, 18198088UL, 363815288UL, 2171386836U
L, 488404231UL, 3157371815UL, 769336092UL, 2464768302UL, 2921774554UL, 2075
839263UL, 835026995UL, 3123726486UL, 4229246330UL, 72352110UL, 3606056482UL
, 512836002UL, 2715428547UL, 319830805UL, 3869483469UL, 3160187826UL, 39029
2489UL, 980889545UL, 2966401462UL, 1385691983UL, 617042280UL, 3501667414UL,
689451808UL, 4047377762UL, 2600411809UL, 3890163301UL, 896679896UL, 154453
3015UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746UL, 819056741UL,
965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1378706455UL, 51902971UL
, 1750733272UL, 2842520097UL, 1631477948UL, 2591254230UL, 426039404UL, 2113
375576UL, 1748401915UL, 3452514983UL, 3892766171UL, 2833368447UL, 275473920
UL, 2394613217UL, 2091953150UL, 870094953UL, 3524323828UL, 897057645UL, 488
878212UL, 873197214UL, 2630100528UL, 3939852929UL, 944114068UL, 2026119728U
L, 115875280UL, 2905867426UL, 3192643919UL, 989201307UL, 2236032812UL, 1888
510348UL, 708001855UL, 2166012172UL, 2121706374UL, 1407967546UL, 1826568876
UL, 3659618284UL, 135277096UL, 2931815032UL, 1055135881UL, 2537437343UL, 18
58044413UL, 2588429924UL, 1423973935UL, 3241939420UL, 3966440877UL, 2375002
886UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940727UL, 382467042UL,
1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 2623887355UL, 4070531
513UL, 2100629879UL, 4142998949UL, 4195414618UL, 1489846841UL, 2688068550UL
, 337182869UL, 84131191UL, 4387588UL, 2641405140UL, 1837403234UL, 661876463
UL, 423660319UL, 2366546732UL, 3698878607UL, 2916121190UL, 309510684UL, 302
9102089UL, 2692507376UL, 3266869596UL, 303422295UL, 11119541UL, 1002311379U
L, 3724446882UL, 2004188516UL, 2652711421UL, 3486722046UL, 4141037460UL, 35
1865836UL, 412875013UL, 113149471UL, 2818130700UL, 3109944987UL, 3207577548
UL, 3462087941UL, 1443140792UL, 322875622UL, 3834145971UL, 1693380373UL, 35
24443732UL, 901891935UL, 3879414752UL, 2226778032UL, 1044609478UL, 12199016
UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348UL, 1685535237UL, 11
14492412UL, 553869152UL, 2855346376UL, 1205558328UL, 808835317UL, 326662629
4UL, 470585896UL, 2438272365UL, 4052471963UL, 683640040UL, 3581539398UL, 15
88419572UL, 1385549869UL, 3035140128UL, 1466027937UL, 4075470388UL, 3638152
88UL, 2171386836UL, 488404231UL, 3157371815UL, 2759472233UL, 2464768302UL,
2921774554UL, 2075839263UL, 835026995UL, 1030654310UL, 4229246330UL, 723521
10UL, 3606056482UL, 512836002UL, 961858496UL, 319830805UL, 3869483469UL, 31
60187826UL, 390292489UL, 2366221117UL, 2966401462UL, 1385691983UL, 61704228
0UL, 3501667414UL, 295865937UL, 4047377762UL, 2600411809UL, 3890163301UL, 8
96679896UL, 21714884UL, 764316452UL, 356556378UL, 4250402651UL, 3990990746U
L, 1012967081UL, 965331966UL, 199003993UL, 3444755752UL, 8230450UL, 1255302
023UL, 51902971UL, 1750733272UL, 2842520097UL, 1631477948UL, 2321320272UL,
426039404UL, 2113375576UL, 1748401915UL, 3452514983UL, 2847013518UL, 283336
8447UL, 275473920UL, 2394613217UL, 2091953150UL, 1250695522UL, 3524323828UL
, 897057645UL, 488878212UL, 873197214UL, 1452317325UL, 3939852929UL, 944114
068UL, 2026119728UL, 115875280UL, 4061820350UL, 3192643919UL, 989201307UL,
2236032812UL, 1888510348UL, 3986446165UL, 2166012172UL, 2121706374UL, 14079
67546UL, 1826568876UL, 2910745432UL, 135277096UL, 2931815032UL, 1055135881U
L, 2537437343UL, 2976455307UL, 2588429924UL, 1423973935UL, 3241939420UL, 39
66440877UL, 2418897705UL, 2477142003UL, 2514473440UL, 1081133199UL, 1496940
727UL, 1321648771UL, 1760129281UL, 81977310UL, 811187075UL, 1385604734UL, 1
7644628UL, 4070531513UL, 2100629879UL, 4142998949UL, 4195414618UL, 26973105
27UL, 2688068550UL, 337182869UL, 84131191UL, 4387588UL, 1724191700UL, 18374
03234UL, 661876463UL, 423660319UL, 2366546732UL, 693430992UL, 2916121190UL,
309510684UL, 3029102089UL, 2692507376UL, 3917396098UL, 303422295UL, 111195
41UL, 1002311379UL, 3724446882UL, 841468294UL, 2652711421UL, 3486722046UL,
4141037460UL, 351865836UL, 1733384185UL, 113149471UL, 2818130700UL, 3109944
987UL, 3207577548UL, 2326233100UL, 1443140792UL, 322875622UL, 3834145971UL,
1693380373UL, 1580706359UL, 901891935UL, 3879414752UL, 2226778032UL, 10446
09478UL, 3805470822UL, 2213168758UL, 4207259464UL, 665553142UL, 2570993348U
L, 3406548636UL, 1114492412UL, 553869152UL, 2855346376UL, 1205558328UL, 428
7831475UL, 1329654114UL, 2347235746UL, 2477803138UL, 2962371859UL, 36100242
83UL, 4197266903UL, 1162294689UL, 1746713323UL, 2815058477UL, 2152552186UL,
4214791071UL, 2382522482UL, 3713914466UL, 3974765132UL, 348354997UL, 16702
76150UL, 2173074887UL, 381736894UL, 3866219357UL, 1919366695UL, 3635118824U
L, 2298653261UL, 3534332682UL, 1627699897UL, 4168636618UL, 3787938690UL, 21
44231271UL, 2067679462UL, 217001062UL, 2308928337UL, 1620415125UL, 35265591
72UL, 749451561UL, 2456947371UL, 3543607786UL, 1893824735UL, 962598819UL, 2
332807164UL, 1691114891UL, 2543992233UL, 2914780639UL, 1610287145UL, 170059
9697UL, 3185174208UL, 552323208UL, 2367242224UL, 3797136972UL, 3415066418UL
, 2468049249UL, 1677937401UL, 40445671UL, 2886682530UL, 2585715434UL, 19493
2329UL, 2994003812UL, 3099556382UL, 680852222UL, 135838738UL, 1371063256UL,
995454898UL, 3754526418UL, 803635682UL, 634588682UL, 3869250783UL, 2442285
521UL, 1455637058UL, 570621479UL, 2512681851UL, 1220136924UL, 750260121UL,
2909903038UL, 1582019728UL, 955115170UL, 1608265445UL, 2157390890UL, 230367
8604UL, 1568394164UL, 831914289UL, 1971271392UL, 1294799854UL, 1489945167UL
, 442427880UL, 1305083700UL, 1211218668UL, 2380073713UL, 2798736785UL, 2193
524273UL, 3227386915UL, 1636588977UL, 3612937642UL, 435113647UL, 1591761830
UL, 536210039UL, 2475747073UL, 4223795480UL, 1786737271UL, 1444661534UL, 32
49410301UL, 3333695212UL, 4169107188UL, 3280638635UL, 702659930UL, 14441279
70UL, 225340755UL, 2255629368UL, 746584456UL, 3965677674UL, 2671132955UL, 2
080717656UL, 2145343886UL, 3712441197UL, 368422910UL, 1297685674UL, 4076123
901UL, 26214470UL, 2948764826UL, 40503299UL, 1198194334UL, 2100063637UL, 19
66331612UL, 2189582064UL, 2064696934UL, 1797550642UL, 3469793941UL, 2868963
812UL, 851437659UL, 240918534UL, 365060070UL, 3530600064UL, 39695324UL, 175
3898837UL, 1286976449UL, 3131971360UL, 2406485219UL, 3365373704UL, 32241134
03UL, 1651742834UL, 587601940UL, 1574206085UL, 3739575036UL, 1413669616UL,
38172232UL, 293127854UL, 4126190109UL, 1891744061UL, 787878666UL, 456643669
UL, 4228710325UL, 2025132037UL, 1492133135UL, 3122840937UL, 969442079UL, 32
72420439UL, 3836126369UL, 1877655562UL, 2766212758UL, 3867984746UL, 3348077
578UL, 1841216706UL, }, {1676507466UL, 1017841240UL, 2992644565UL, 47693615
8UL, 2468072723UL, 3113105154UL, 1154120402UL, 460889625UL, 1942263502UL, 1
761593999UL, 3020908939UL, 3078194866UL, 310971889UL, 1644896012UL, 3756044
556UL, 3549937583UL, 3710822994UL, 3554313733UL, 2174654326UL, 4251063242UL
, 2340485150UL, 950951909UL, 4288936895UL, 3744348848UL, 706644559UL, 10859
27825UL, 1595992020UL, 3288724966UL, 1367247946UL, 2950094970UL, 3925419886
UL, 2628739022UL, 2528254629UL, 3582224789UL, 3907345559UL, 3373329273UL, 4
255542251UL, 1185418446UL, 4018656113UL, 2854344020UL, 1381160022UL, 364243
8773UL, 4284399225UL, 935780030UL, 4142412144UL, 1263328494UL, 1154237693UL
, 2684443667UL, 3067549398UL, 4253090033UL, 1251034970UL, 1874233020UL, 322
2830495UL, 3866931656UL, 286048055UL, 3146635362UL, 1436483376UL, 282187649
5UL, 3927829532UL, 2648886905UL, 2142862852UL, 1368937545UL, 2647327844UL,
1072219385UL, 2621337706UL, 3543274652UL, 911792564UL, 1204178178UL, 412721
4323UL, 2821691380UL, 3101998294UL, 730811902UL, 1989156224UL, 2872353003UL
, 278290276UL, 1390223786UL, 2657819643UL, 552729795UL, 1736270535UL, 27592
07116UL, 1897013739UL, 3657020278UL, 1387364861UL, 1966588302UL, 1049203087
UL, 486446521UL, 3675999281UL, 714737345UL, 686837530UL, 85509025UL, 360908
9773UL, 2117061768UL, 3935682560UL, 3859508784UL, 4105287041UL, 1808988481U
L, 83680601UL, 1464326680UL, 1657693523UL, 3318062731UL, 1391154023UL, 2344
60119UL, 3551348221UL, 2245244809UL, 3635923821UL, 2814385745UL, 3497626257
UL, 916790795UL, 245338628UL, 2514528380UL, 3711787525UL, 2239286063UL, 105
4058916UL, 3963706010UL, 3176203796UL, 2230543409UL, 2173597546UL, 37867338
92UL, 1396036965UL, 1038764273UL, 2032556038UL, 3216540537UL, 3298170974UL,
1008892557UL, 141155464UL, 1863766055UL, 3931110690UL, 191299053UL, 201913
9711UL, 2409528317UL, 739418419UL, 1377144055UL, 2876702705UL, 3911939673UL
, 1197696462UL, 2814009721UL, 600813233UL, 1535885024UL, 1486280357UL, 3084
650548UL, 2324695947UL, 2293284974UL, 2036339249UL, 3465600153UL, 162444610
8UL, 327866771UL, 3356772175UL, 1826625240UL, 1947102360UL, 3661848193UL, 1
421374867UL, 3228945021UL, 1358646008UL, 1067180174UL, 2190741258UL, 643362
354UL, 109899594UL, 2064362635UL, 3249674888UL, 2165543887UL, 4180291913UL,
1676507466UL, 1017841240UL, 2992644565UL, 476936158UL, 3608467942UL, 31131
05154UL, 1154120402UL, 460889625UL, 1942263502UL, 1862994005UL, 3020908939U
L, 3078194866UL, 310971889UL, 1644896012UL, 693774191UL, 3549937583UL, 3710
822994UL, 3554313733UL, 2174654326UL, 37658897UL, 2340485150UL, 950951909UL
, 4288936895UL, 3744348848UL, 2258231402UL, 1085927825UL, 1595992020UL, 328
8724966UL, 1367247946UL, 3850509554UL, 3925419886UL, 2628739022UL, 25282546
29UL, 3582224789UL, 3124287811UL, 3373329273UL, 4255542251UL, 1185418446UL,
4018656113UL, 1989726178UL, 1381160022UL, 3642438773UL, 4284399225UL, 9357
80030UL, 3622052196UL, 1263328494UL, 1154237693UL, 2684443667UL, 3067549398
UL, 2786224913UL, 1251034970UL, 1874233020UL, 3222830495UL, 3866931656UL, 1
529490307UL, 3146635362UL, 1436483376UL, 2821876495UL, 3927829532UL, 979247
444UL, 2142862852UL, 1368937545UL, 2647327844UL, 1072219385UL, 294065371UL,
3543274652UL, 911792564UL, 1204178178UL, 4127214323UL, 103582737UL, 310199
8294UL, 730811902UL, 1989156224UL, 2872353003UL, 1885087777UL, 1390223786UL
, 2657819643UL, 552729795UL, 1736270535UL, 3325206451UL, 1897013739UL, 3657
020278UL, 1387364861UL, 1966588302UL, 2117065739UL, 486446521UL, 3675999281
UL, 714737345UL, 686837530UL, 3946214694UL, 3609089773UL, 2117061768UL, 393
5682560UL, 3859508784UL, 2916136885UL, 1808988481UL, 83680601UL, 1464326680
UL, 1657693523UL, 3438751781UL, 1391154023UL, 234460119UL, 3551348221UL, 22
45244809UL, 3948410079UL, 2814385745UL, 3497626257UL, 916790795UL, 24533862
8UL, 1767303496UL, 3711787525UL, 2239286063UL, 1054058916UL, 3963706010UL,
4140631909UL, 2230543409UL, 2173597546UL, 3786733892UL, 1396036965UL, 11160
33475UL, 2032556038UL, 3216540537UL, 3298170974UL, 1008892557UL, 667272562U
L, 1863766055UL, 3931110690UL, 191299053UL, 2019139711UL, 272901326UL, 7394
18419UL, 1377144055UL, 2876702705UL, 3911939673UL, 3839312742UL, 2814009721
UL, 600813233UL, 1535885024UL, 1486280357UL, 4256065219UL, 2324695947UL, 22
93284974UL, 2036339249UL, 3465600153UL, 1215859603UL, 327866771UL, 33567721
75UL, 1826625240UL, 1947102360UL, 4240407984UL, 1421374867UL, 3228945021UL,
1358646008UL, 1067180174UL, 4100357988UL, 643362354UL, 109899594UL, 206436
2635UL, 3249674888UL, 2898852084UL, 4180291913UL, 1676507466UL, 1017841240U
L, 2992644565UL, 1569683812UL, 3608467942UL, 3113105154UL, 1154120402UL, 46
0889625UL, 966040649UL, 1862994005UL, 3020908939UL, 3078194866UL, 310971889
UL, 786634113UL, 693774191UL, 3549937583UL, 3710822994UL, 3554313733UL, 157
8429713UL, 37658897UL, 2340485150UL, 950951909UL, 4288936895UL, 2528123823U
L, 2258231402UL, 1085927825UL, 1595992020UL, 3288724966UL, 3544041088UL, 38
50509554UL, 3925419886UL, 2628739022UL, 2528254629UL, 2562145937UL, 3124287
811UL, 3373329273UL, 4255542251UL, 1185418446UL, 3693565710UL, 1989726178UL
, 1381160022UL, 3642438773UL, 4284399225UL, 3271478204UL, 3622052196UL, 126
3328494UL, 1154237693UL, 2684443667UL, 3615401444UL, 2786224913UL, 12510349
70UL, 1874233020UL, 3222830495UL, 2572413057UL, 1529490307UL, 3146635362UL,
1436483376UL, 2821876495UL, 3993894153UL, 979247444UL, 2142862852UL, 13689
37545UL, 2647327844UL, 1353904396UL, 294065371UL, 3543274652UL, 911792564UL
, 1204178178UL, 3165709748UL, 103582737UL, 3101998294UL, 730811902UL, 19891
56224UL, 893293786UL, 1885087777UL, 1390223786UL, 2657819643UL, 552729795UL
, 3388458110UL, 3325206451UL, 1897013739UL, 3657020278UL, 1387364861UL, 302
5318046UL, 2117065739UL, 486446521UL, 3675999281UL, 714737345UL, 2085926890
UL, 3946214694UL, 3609089773UL, 2117061768UL, 3935682560UL, 868009118UL, 29
16136885UL, 1808988481UL, 83680601UL, 1464326680UL, 797410789UL, 3438751781
UL, 1391154023UL, 234460119UL, 3551348221UL, 4068940987UL, 3948410079UL, 28
14385745UL, 3497626257UL, 916790795UL, 3722456098UL, 1767303496UL, 37117875
25UL, 2239286063UL, 1054058916UL, 2030352819UL, 4140631909UL, 2230543409UL,
2173597546UL, 3786733892UL, 3211336683UL, 1116033475UL, 2032556038UL, 3216
540537UL, 3298170974UL, 2589589144UL, 667272562UL, 1863766055UL, 3931110690
UL, 191299053UL, 1139480458UL, 272901326UL, 739418419UL, 1377144055UL, 2876
702705UL, 1954361769UL, 3839312742UL, 2814009721UL, 600813233UL, 1535885024
UL, 3587775605UL, 4256065219UL, 2324695947UL, 2293284974UL, 2036339249UL, 1
534849280UL, 1215859603UL, 327866771UL, 3356772175UL, 1826625240UL, 7203726
69UL, 4240407984UL, 1421374867UL, 3228945021UL, 1358646008UL, 3409069246UL,
4100357988UL, 643362354UL, 109899594UL, 2064362635UL, 4243434294UL, 289885
2084UL, 4180291913UL, 1676507466UL, 1017841240UL, 3243922356UL, 1569683812U
L, 3608467942UL, 3113105154UL, 1154120402UL, 1479311403UL, 966040649UL, 186
2994005UL, 3020908939UL, 3078194866UL, 1556392996UL, 786634113UL, 693774191
UL, 3549937583UL, 3710822994UL, 920664071UL, 1578429713UL, 37658897UL, 2340
485150UL, 950951909UL, 740197415UL, 2528123823UL, 2258231402UL, 1085927825U
L, 1595992020UL, 2580760267UL, 3544041088UL, 3850509554UL, 3925419886UL, 26
28739022UL, 3867556156UL, 2562145937UL, 3124287811UL, 3373329273UL, 4255542
251UL, 3185271749UL, 3693565710UL, 1989726178UL, 1381160022UL, 3642438773UL
, 3042165367UL, 3271478204UL, 3622052196UL, 1263328494UL, 1154237693UL, 101
6814036UL, 3615401444UL, 2786224913UL, 1251034970UL, 1874233020UL, 29560869
71UL, 2572413057UL, 1529490307UL, 3146635362UL, 1436483376UL, 1513970396UL,
3993894153UL, 979247444UL, 2142862852UL, 1368937545UL, 3275665128UL, 13539
04396UL, 294065371UL, 3543274652UL, 911792564UL, 2209636872UL, 3165709748UL
, 103582737UL, 3101998294UL, 730811902UL, 965151434UL, 893293786UL, 1885087
777UL, 1390223786UL, 2657819643UL, 3278634059UL, 3388458110UL, 3325206451UL
, 1897013739UL, 3657020278UL, 4293473749UL, 3025318046UL, 2117065739UL, 486
446521UL, 3675999281UL, 620561205UL, 2085926890UL, 3946214694UL, 3609089773
UL, 2117061768UL, 163384588UL, 868009118UL, 2916136885UL, 1808988481UL, 836
80601UL, 10243015UL, 797410789UL, 3438751781UL, 1391154023UL, 234460119UL,
1278218413UL, 4068940987UL, 3948410079UL, 2814385745UL, 3497626257UL, 12332
72798UL, 3722456098UL, 1767303496UL, 3711787525UL, 2239286063UL, 3968895688
UL, 2030352819UL, 4140631909UL, 2230543409UL, 2173597546UL, 2866251044UL, 3
211336683UL, 1116033475UL, 2032556038UL, 3216540537UL, 4233849723UL, 258958
9144UL, 667272562UL, 1863766055UL, 3931110690UL, 2468422423UL, 1139480458UL
, 272901326UL, 739418419UL, 1377144055UL, 4240143411UL, 1954361769UL, 38393
12742UL, 2814009721UL, 600813233UL, 3976840004UL, 3587775605UL, 4256065219U
L, 2324695947UL, 2293284974UL, 437604123UL, 1534849280UL, 1215859603UL, 327
866771UL, 3356772175UL, 2757237699UL, 720372669UL, 4240407984UL, 1421374867
UL, 3228945021UL, 3284801305UL, 3409069246UL, 4100357988UL, 643362354UL, 10
9899594UL, 1301585321UL, 2528806870UL, 1838904064UL, 448772403UL, 109784974
0UL, 1899994097UL, 618309123UL, 1911948510UL, 2309256224UL, 1861398151UL, 9
05306403UL, 1067595802UL, 36868624UL, 3780886191UL, 835126206UL, 3190251977
UL, 2672497726UL, 2085944002UL, 2912993968UL, 2493776706UL, 667136329UL, 14
74890786UL, 2383346554UL, 943528949UL, 3376706013UL, 2495573574UL, 14495634
5UL, 793159960UL, 1591274917UL, 477107637UL, 1383815442UL, 67384899UL, 2355
242218UL, 1687409818UL, 3801093871UL, 2108217811UL, 3455908733UL, 417216079
7UL, 3935534685UL, 631067839UL, 1187677548UL, 2280856137UL, 3020767646UL, 2
063176246UL, 3736904984UL, 2952933848UL, 2975164686UL, 4144473303UL, 346709
77UL, 1250976509UL, 3484166554UL, 1532744745UL, 225700994UL, 1878713627UL,
2122358980UL, 1456610194UL, 2917522161UL, 2818947075UL, 102678939UL, 537438
58UL, 2095250656UL, 4023979225UL, 3094092874UL, 4128760696UL, 3411610028UL,
3020200609UL, 2225866341UL, 586320946UL, 63813522UL, 1238216159UL, 2825692
263UL, 2169937231UL, 3298517640UL, 1542128261UL, 2205544184UL, 1258655704UL
, 2629012083UL, 4113650203UL, 3198617867UL, 2742310794UL, 3372657381UL, 311
5904410UL, 1948638822UL, 1123521744UL, 1080429281UL, 4086706732UL, 41426932
11UL, 817377147UL, 2570194641UL, 26001503UL, 2861456160UL, 4185725555UL, 25
73003804UL, 1618628779UL, 2588489212UL, 3996192609UL, 1555844274UL, 1003123
505UL, 1326350123UL, 1130583849UL, 3017128756UL, 74119042UL, 4041266437UL,
1938014170UL, 3528465794UL, 4203969698UL, 1913054398UL, 3617979809UL, 22188
10167UL, 2453899816UL, 1997423206UL, 477446533UL, 303090065UL, 757937082UL,
1523238256UL, 3140505311UL, 1422588701UL, 3642014639UL, 1740624195UL, 1276
017154UL, 3072526193UL, 3675105122UL, 1335122682UL, 4080595263UL, 230851942
0UL, 3299182769UL, 1461978532UL, 3098694217UL, 2982399822UL, 3088698511UL,
586759229UL, 3548750902UL, 1449857891UL, 2866451663UL, 2525162286UL, 572946
02UL, 4107991297UL, 1214672265UL, 2940391280UL, 4285346034UL, 3338216759UL,
737207923UL, 4264163846UL, 59219141UL, 2300024654UL, 1876616814UL, 1976543
605UL, 783571061UL, 1724699622UL, 1967524469UL, 1650309916UL, 3322257631UL,
3975521122UL, 273342162UL, 1156754241UL, 185315896UL, 3368133921UL, 663146
55UL, 4153777915UL, 3519901897UL, }, {3672467167UL, 68684525UL, 1738833632U
L, 3081329135UL, 2583806115UL, 2291130512UL, 503032614UL, 3658059597UL, 571
493931UL, 685537959UL, 3498787788UL, 422428426UL, 3879256913UL, 1173158320U
L, 4000800121UL, 298972869UL, 1718342816UL, 2541691685UL, 2490502642UL, 232
1452806UL, 4223212804UL, 1812334632UL, 3717655725UL, 4238191852UL, 30013071
65UL, 2621896355UL, 2572404999UL, 3590094954UL, 760765206UL, 2293618001UL,
1392353032UL, 1733137169UL, 2674005018UL, 4067961151UL, 1505710487UL, 45107
8217UL, 2591688848UL, 12635611UL, 507045428UL, 694822241UL, 1789383090UL, 1
140183890UL, 1720695967UL, 1994318191UL, 3340349873UL, 2793804971UL, 105443
3135UL, 2345087879UL, 3179939285UL, 1651968615UL, 1793223686UL, 1055357758U
L, 914271617UL, 483007580UL, 2127727816UL, 2754998083UL, 3179053982UL, 5984
42002UL, 1950227301UL, 213053613UL, 3566888111UL, 2832258993UL, 4260365359U
L, 443662829UL, 1706542890UL, 3852730296UL, 3643260763UL, 2163607277UL, 181
2905006UL, 171529637UL, 215187467UL, 2369406909UL, 1929000706UL, 2572441025
UL, 2133955541UL, 810692262UL, 1337974799UL, 4030350704UL, 2159178715UL, 37
69451556UL, 1026825278UL, 593628480UL, 1817383139UL, 878832429UL, 225387635
0UL, 203612980UL, 2102950440UL, 3407143936UL, 1912362251UL, 1595387637UL, 2
827580539UL, 305467658UL, 3292706746UL, 44135525UL, 4001933553UL, 369734308
9UL, 760470915UL, 587414402UL, 1419378814UL, 2852774010UL, 3891626781UL, 27
57016765UL, 1090707384UL, 3997074427UL, 1047182100UL, 2855539022UL, 3622915
9UL, 1591415533UL, 3471572739UL, 1237952140UL, 2614469314UL, 213338525UL, 8
86212578UL, 2620301943UL, 713590207UL, 2430496777UL, 1198164420UL, 26448416
98UL, 3654164701UL, 36283572UL, 1461695896UL, 1770331341UL, 1641501876UL, 3
470919184UL, 3181021559UL, 3053795110UL, 3533531372UL, 3134337355UL, 668308
383UL, 388340999UL, 3221275220UL, 1589659138UL, 294382235UL, 1447443579UL,
690177534UL, 1799726917UL, 2838977761UL, 4172949119UL, 2360858031UL, 159385
920UL, 2248389027UL, 1790015671UL, 3925738275UL, 1049918544UL, 4107349511UL
, 1619955951UL, 4188275966UL, 1672572975UL, 2672697497UL, 1863413666UL, 747
724021UL, 4037561738UL, 1605940213UL, 445253292UL, 3362434828UL, 610898209U
L, 1473244091UL, 735444769UL, 1540599852UL, 2449351720UL, 1032410949UL, 367
2467167UL, 68684525UL, 1738833632UL, 3081329135UL, 519684794UL, 2291130512U
L, 503032614UL, 3658059597UL, 571493931UL, 2400186105UL, 3498787788UL, 4224
28426UL, 3879256913UL, 1173158320UL, 4120704752UL, 298972869UL, 1718342816U
L, 2541691685UL, 2490502642UL, 1686027891UL, 4223212804UL, 1812334632UL, 37
17655725UL, 4238191852UL, 642431972UL, 2621896355UL, 2572404999UL, 35900949
54UL, 760765206UL, 2949609717UL, 1392353032UL, 1733137169UL, 2674005018UL,
4067961151UL, 1526077846UL, 451078217UL, 2591688848UL, 12635611UL, 50704542
8UL, 2417951415UL, 1789383090UL, 1140183890UL, 1720695967UL, 1994318191UL,
3465605863UL, 2793804971UL, 1054433135UL, 2345087879UL, 3179939285UL, 30792
97626UL, 1793223686UL, 1055357758UL, 914271617UL, 483007580UL, 306802527UL,
2754998083UL, 3179053982UL, 598442002UL, 1950227301UL, 2473418737UL, 35668
88111UL, 2832258993UL, 4260365359UL, 443662829UL, 2097776414UL, 3852730296U
L, 3643260763UL, 2163607277UL, 1812905006UL, 3957721904UL, 215187467UL, 236
9406909UL, 1929000706UL, 2572441025UL, 3779486126UL, 810692262UL, 133797479
9UL, 4030350704UL, 2159178715UL, 1127012865UL, 1026825278UL, 593628480UL, 1
817383139UL, 878832429UL, 361018423UL, 203612980UL, 2102950440UL, 340714393
6UL, 1912362251UL, 1475218277UL, 2827580539UL, 305467658UL, 3292706746UL, 4
4135525UL, 1900092336UL, 3697343089UL, 760470915UL, 587414402UL, 1419378814
UL, 343303227UL, 3891626781UL, 2757016765UL, 1090707384UL, 3997074427UL, 74
5490961UL, 2855539022UL, 36229159UL, 1591415533UL, 3471572739UL, 3920625546
UL, 2614469314UL, 213338525UL, 886212578UL, 2620301943UL, 827771411UL, 2430
496777UL, 1198164420UL, 2644841698UL, 3654164701UL, 2747674190UL, 146169589
6UL, 1770331341UL, 1641501876UL, 3470919184UL, 919857376UL, 3053795110UL, 3
533531372UL, 3134337355UL, 668308383UL, 201138876UL, 3221275220UL, 15896591
38UL, 294382235UL, 1447443579UL, 4211579707UL, 1799726917UL, 2838977761UL,
4172949119UL, 2360858031UL, 416103844UL, 2248389027UL, 1790015671UL, 392573
8275UL, 1049918544UL, 3481887924UL, 1619955951UL, 4188275966UL, 1672572975U
L, 2672697497UL, 564854400UL, 747724021UL, 4037561738UL, 1605940213UL, 4452
53292UL, 604900912UL, 610898209UL, 1473244091UL, 735444769UL, 1540599852UL,
3036173307UL, 1032410949UL, 3672467167UL, 68684525UL, 1738833632UL, 973022
696UL, 519684794UL, 2291130512UL, 503032614UL, 3658059597UL, 1500301452UL,
2400186105UL, 3498787788UL, 422428426UL, 3879256913UL, 3923611748UL, 412070
4752UL, 298972869UL, 1718342816UL, 2541691685UL, 2323881484UL, 1686027891UL
, 4223212804UL, 1812334632UL, 3717655725UL, 2109094458UL, 642431972UL, 2621
896355UL, 2572404999UL, 3590094954UL, 1837882537UL, 2949609717UL, 139235303
2UL, 1733137169UL, 2674005018UL, 3252348987UL, 1526077846UL, 451078217UL, 2
591688848UL, 12635611UL, 3971261781UL, 2417951415UL, 1789383090UL, 11401838
90UL, 1720695967UL, 2906966040UL, 3465605863UL, 2793804971UL, 1054433135UL,
2345087879UL, 915518921UL, 3079297626UL, 1793223686UL, 1055357758UL, 91427
1617UL, 791633499UL, 306802527UL, 2754998083UL, 3179053982UL, 598442002UL,
324402573UL, 2473418737UL, 3566888111UL, 2832258993UL, 4260365359UL, 216804
6398UL, 2097776414UL, 3852730296UL, 3643260763UL, 2163607277UL, 2595175979U
L, 3957721904UL, 215187467UL, 2369406909UL, 1929000706UL, 657446369UL, 3779
486126UL, 810692262UL, 1337974799UL, 4030350704UL, 1865557469UL, 1127012865
UL, 1026825278UL, 593628480UL, 1817383139UL, 3414354529UL, 361018423UL, 203
612980UL, 2102950440UL, 3407143936UL, 1739372987UL, 1475218277UL, 282758053
9UL, 305467658UL, 3292706746UL, 825045562UL, 1900092336UL, 3697343089UL, 76
0470915UL, 587414402UL, 2000637694UL, 343303227UL, 3891626781UL, 2757016765
UL, 1090707384UL, 4015377800UL, 745490961UL, 2855539022UL, 36229159UL, 1591
415533UL, 2208656873UL, 3920625546UL, 2614469314UL, 213338525UL, 886212578U
L, 2729976209UL, 827771411UL, 2430496777UL, 1198164420UL, 2644841698UL, 192
2667440UL, 2747674190UL, 1461695896UL, 1770331341UL, 1641501876UL, 35753531
1UL, 919857376UL, 3053795110UL, 3533531372UL, 3134337355UL, 1004072597UL, 2
01138876UL, 3221275220UL, 1589659138UL, 294382235UL, 1148950143UL, 42115797
07UL, 1799726917UL, 2838977761UL, 4172949119UL, 892664404UL, 416103844UL, 2
248389027UL, 1790015671UL, 3925738275UL, 2612357890UL, 3481887924UL, 161995
5951UL, 4188275966UL, 1672572975UL, 2005534713UL, 564854400UL, 747724021UL,
4037561738UL, 1605940213UL, 2620990454UL, 604900912UL, 610898209UL, 147324
4091UL, 735444769UL, 3571225334UL, 3036173307UL, 1032410949UL, 3672467167UL
, 68684525UL, 3327351604UL, 973022696UL, 519684794UL, 2291130512UL, 5030326
14UL, 3814902238UL, 1500301452UL, 2400186105UL, 3498787788UL, 422428426UL,
1756753750UL, 3923611748UL, 4120704752UL, 298972869UL, 1718342816UL, 652903
081UL, 2323881484UL, 1686027891UL, 4223212804UL, 1812334632UL, 1599640566UL
, 2109094458UL, 642431972UL, 2621896355UL, 2572404999UL, 1668409355UL, 1837
882537UL, 2949609717UL, 1392353032UL, 1733137169UL, 3691709793UL, 325234898
7UL, 1526077846UL, 451078217UL, 2591688848UL, 3353622601UL, 3971261781UL, 2
417951415UL, 1789383090UL, 1140183890UL, 4113853791UL, 2906966040UL, 346560
5863UL, 2793804971UL, 1054433135UL, 2195882948UL, 915518921UL, 3079297626UL
, 1793223686UL, 1055357758UL, 898713552UL, 791633499UL, 306802527UL, 275499
8083UL, 3179053982UL, 2469350088UL, 324402573UL, 2473418737UL, 3566888111UL
, 2832258993UL, 1377718274UL, 2168046398UL, 2097776414UL, 3852730296UL, 364
3260763UL, 3492388484UL, 2595175979UL, 3957721904UL, 215187467UL, 236940690
9UL, 4243449339UL, 657446369UL, 3779486126UL, 810692262UL, 1337974799UL, 39
60230785UL, 1865557469UL, 1127012865UL, 1026825278UL, 593628480UL, 73279331
2UL, 3414354529UL, 361018423UL, 203612980UL, 2102950440UL, 2401792405UL, 17
39372987UL, 1475218277UL, 2827580539UL, 305467658UL, 2454275289UL, 82504556
2UL, 1900092336UL, 3697343089UL, 760470915UL, 2146882409UL, 2000637694UL, 3
43303227UL, 3891626781UL, 2757016765UL, 3997473261UL, 4015377800UL, 7454909
61UL, 2855539022UL, 36229159UL, 2375394427UL, 2208656873UL, 3920625546UL, 2
614469314UL, 213338525UL, 2055366274UL, 2729976209UL, 827771411UL, 24304967
77UL, 1198164420UL, 1789631187UL, 1922667440UL, 2747674190UL, 1461695896UL,
1770331341UL, 4284442852UL, 357535311UL, 919857376UL, 3053795110UL, 353353
1372UL, 2124270060UL, 1004072597UL, 201138876UL, 3221275220UL, 1589659138UL
, 1418386120UL, 1148950143UL, 4211579707UL, 1799726917UL, 2838977761UL, 354
0708069UL, 892664404UL, 416103844UL, 2248389027UL, 1790015671UL, 3936883UL,
2612357890UL, 3481887924UL, 1619955951UL, 4188275966UL, 2963623483UL, 2005
534713UL, 564854400UL, 747724021UL, 4037561738UL, 3431155922UL, 2620990454U
L, 604900912UL, 610898209UL, 1473244091UL, 3880001339UL, 2879060316UL, 3300
897679UL, 3960972039UL, 3201086624UL, 3814462934UL, 3426650044UL, 193088163
2UL, 1981178788UL, 2956279691UL, 4272406256UL, 372705521UL, 1359389771UL, 1
590302979UL, 3940206208UL, 3817999127UL, 2527835456UL, 2739078164UL, 716997
849UL, 3235607043UL, 2550297745UL, 3688700200UL, 354502605UL, 2285793656UL,
2339138034UL, 3912354142UL, 2262255668UL, 469322622UL, 1319943359UL, 19161
01235UL, 200441823UL, 509436982UL, 2160284593UL, 1687919695UL, 4153615582UL
, 495735041UL, 3694469424UL, 2086893117UL, 4223008799UL, 105344742UL, 16980
33424UL, 1149223145UL, 4183918790UL, 4176151950UL, 415739351UL, 817762972UL
, 3768072560UL, 1931430949UL, 2698979439UL, 3481477932UL, 1994322914UL, 407
8299950UL, 1268233995UL, 3254069145UL, 91029129UL, 498234704UL, 1636613942U
L, 3710087092UL, 3876816560UL, 3510446387UL, 3870169008UL, 1370156410UL, 24
42498047UL, 2324396523UL, 1258730334UL, 621954739UL, 1053015373UL, 49182071
7UL, 3386515432UL, 2203703266UL, 120167176UL, 2383669740UL, 1038666440UL, 2
927342870UL, 3583197824UL, 1236241846UL, 2474675929UL, 679052891UL, 2451259
584UL, 2177706146UL, 606842882UL, 3546980104UL, 2289281509UL, 353873434UL,
2041926837UL, 1238346748UL, 2729109726UL, 2843938395UL, 2938124210UL, 25544
43866UL, 1494477920UL, 693378319UL, 2020963566UL, 2000385949UL, 3744098787U
L, 650307220UL, 2631327075UL, 1529128757UL, 595871428UL, 3206666562UL, 4580
62987UL, 875238192UL, 3729317374UL, 1368843921UL, 3478430230UL, 3234384578U
L, 3232435428UL, 321359326UL, 994274524UL, 361184397UL, 4285497594UL, 91526
3578UL, 1486882838UL, 9988613UL, 829077170UL, 677216046UL, 4141828204UL, 16
5804609UL, 1086678519UL, 2933434608UL, 1351662802UL, 2640085040UL, 26115029
32UL, 2033698714UL, 2008873254UL, 3995557835UL, 1020873906UL, 67873555UL, 2
230337823UL, 1263800417UL, 1148712155UL, 3985159589UL, 2979503513UL, 285471
4997UL, 1539343345UL, 2751484352UL, 1569100732UL, 2020758949UL, 2126757134U
L, 3426641899UL, 2808587825UL, 1953320148UL, 1096398464UL, 1502907172UL, 37
51230087UL, 765557661UL, 765290990UL, 3056075500UL, 2040620632UL, 422573751
UL, 3613558930UL, 1741145769UL, 273531216UL, 837238736UL, 494297893UL, 2903
251124UL, 1636782182UL, 4256592784UL, 3652746656UL, 4258393217UL, }, | |
| }; | | }; | |
| | | | |
|
| | | static __constant__ unsigned int precalc_xorwow_offset_matrix[8][800] = { | |
| | | {0UL, 0UL, 0UL, 0UL, 3UL, 0UL, 0UL, 0UL, 0UL, 6UL, 0UL, 0UL, 0UL, 0UL, 15UL | |
| | | , 0UL, 0UL, 0UL, 0UL, 30UL, 0UL, 0UL, 0UL, 0UL, 60UL, 0UL, 0UL, 0UL, 0UL, 1 | |
| | | 20UL, 0UL, 0UL, 0UL, 0UL, 240UL, 0UL, 0UL, 0UL, 0UL, 480UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 960UL, 0UL, 0UL, 0UL, 0UL, 1920UL, 0UL, 0UL, 0UL, 0UL, 3840UL, 0UL, 0U | |
| | | L, 0UL, 0UL, 7680UL, 0UL, 0UL, 0UL, 0UL, 15360UL, 0UL, 0UL, 0UL, 0UL, 30720 | |
| | | UL, 0UL, 0UL, 0UL, 0UL, 61440UL, 0UL, 0UL, 0UL, 0UL, 122880UL, 0UL, 0UL, 0U | |
| | | L, 0UL, 245760UL, 0UL, 0UL, 0UL, 0UL, 491520UL, 0UL, 0UL, 0UL, 0UL, 983040U | |
| | | L, 0UL, 0UL, 0UL, 0UL, 1966080UL, 0UL, 0UL, 0UL, 0UL, 3932160UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 7864320UL, 0UL, 0UL, 0UL, 0UL, 15728640UL, 0UL, 0UL, 0UL, 0UL, 31 | |
| | | 457280UL, 0UL, 0UL, 0UL, 0UL, 62914560UL, 0UL, 0UL, 0UL, 0UL, 125829120UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 251658240UL, 0UL, 0UL, 0UL, 0UL, 503316480UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 1006632960UL, 0UL, 0UL, 0UL, 0UL, 2013265920UL, 0UL, 0UL, 0UL, 0 | |
| | | UL, 4026531840UL, 0UL, 0UL, 0UL, 0UL, 3758096384UL, 1UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 2UL, 0UL, 0UL, 0UL, 0UL, 4UL, 0UL, 0UL, 0UL, 0UL, 8UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 16UL, 0UL, 0UL, 0UL, 0UL, 32UL, 0UL, 0UL, 0UL, 0UL, 64UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 128UL, 0UL, 0UL, 0UL, 0UL, 256UL, 0UL, 0UL, 0UL, 0UL, 512UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 1024UL, 0UL, 0UL, 0UL, 0UL, 2048UL, 0UL, 0UL, 0UL, 0UL, 4096UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 8192UL, 0UL, 0UL, 0UL, 0UL, 16384UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 32768UL, 0UL, 0UL, 0UL, 0UL, 65536UL, 0UL, 0UL, 0UL, 0UL, 131072UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 262144UL, 0UL, 0UL, 0UL, 0UL, 524288UL, 0UL, 0UL, 0UL, 0UL, | |
| | | 1048576UL, 0UL, 0UL, 0UL, 0UL, 2097152UL, 0UL, 0UL, 0UL, 0UL, 4194304UL, 0U | |
| | | L, 0UL, 0UL, 0UL, 8388608UL, 0UL, 0UL, 0UL, 0UL, 16777216UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 33554432UL, 0UL, 0UL, 0UL, 0UL, 67108864UL, 0UL, 0UL, 0UL, 0UL, 13421 | |
| | | 7728UL, 0UL, 0UL, 0UL, 0UL, 268435456UL, 0UL, 0UL, 0UL, 0UL, 536870912UL, 0 | |
| | | UL, 0UL, 0UL, 0UL, 1073741824UL, 0UL, 0UL, 0UL, 0UL, 2147483648UL, 0UL, 0UL | |
| | | , 0UL, 0UL, 0UL, 1UL, 0UL, 0UL, 0UL, 0UL, 2UL, 0UL, 0UL, 0UL, 0UL, 4UL, 0UL | |
| | | , 0UL, 0UL, 0UL, 8UL, 0UL, 0UL, 0UL, 0UL, 16UL, 0UL, 0UL, 0UL, 0UL, 32UL, 0 | |
| | | UL, 0UL, 0UL, 0UL, 64UL, 0UL, 0UL, 0UL, 0UL, 128UL, 0UL, 0UL, 0UL, 0UL, 256 | |
| | | UL, 0UL, 0UL, 0UL, 0UL, 512UL, 0UL, 0UL, 0UL, 0UL, 1024UL, 0UL, 0UL, 0UL, 0 | |
| | | UL, 2048UL, 0UL, 0UL, 0UL, 0UL, 4096UL, 0UL, 0UL, 0UL, 0UL, 8192UL, 0UL, 0U | |
| | | L, 0UL, 0UL, 16384UL, 0UL, 0UL, 0UL, 0UL, 32768UL, 0UL, 0UL, 0UL, 0UL, 6553 | |
| | | 6UL, 0UL, 0UL, 0UL, 0UL, 131072UL, 0UL, 0UL, 0UL, 0UL, 262144UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 524288UL, 0UL, 0UL, 0UL, 0UL, 1048576UL, 0UL, 0UL, 0UL, 0UL, 2097 | |
| | | 152UL, 0UL, 0UL, 0UL, 0UL, 4194304UL, 0UL, 0UL, 0UL, 0UL, 8388608UL, 0UL, 0 | |
| | | UL, 0UL, 0UL, 16777216UL, 0UL, 0UL, 0UL, 0UL, 33554432UL, 0UL, 0UL, 0UL, 0U | |
| | | L, 67108864UL, 0UL, 0UL, 0UL, 0UL, 134217728UL, 0UL, 0UL, 0UL, 0UL, 2684354 | |
| | | 56UL, 0UL, 0UL, 0UL, 0UL, 536870912UL, 0UL, 0UL, 0UL, 0UL, 1073741824UL, 0U | |
| | | L, 0UL, 0UL, 0UL, 2147483648UL, 0UL, 0UL, 0UL, 0UL, 0UL, 1UL, 0UL, 0UL, 0UL | |
| | | , 0UL, 2UL, 0UL, 0UL, 0UL, 0UL, 4UL, 0UL, 0UL, 0UL, 0UL, 8UL, 0UL, 0UL, 0UL | |
| | | , 0UL, 16UL, 0UL, 0UL, 0UL, 0UL, 32UL, 0UL, 0UL, 0UL, 0UL, 64UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 128UL, 0UL, 0UL, 0UL, 0UL, 256UL, 0UL, 0UL, 0UL, 0UL, 512UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 1024UL, 0UL, 0UL, 0UL, 0UL, 2048UL, 0UL, 0UL, 0UL, 0UL, 409 | |
| | | 6UL, 0UL, 0UL, 0UL, 0UL, 8192UL, 0UL, 0UL, 0UL, 0UL, 16384UL, 0UL, 0UL, 0UL | |
| | | , 0UL, 32768UL, 0UL, 0UL, 0UL, 0UL, 65536UL, 0UL, 0UL, 0UL, 0UL, 131072UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 262144UL, 0UL, 0UL, 0UL, 0UL, 524288UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 1048576UL, 0UL, 0UL, 0UL, 0UL, 2097152UL, 0UL, 0UL, 0UL, 0UL, 4194304U | |
| | | L, 0UL, 0UL, 0UL, 0UL, 8388608UL, 0UL, 0UL, 0UL, 0UL, 16777216UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 33554432UL, 0UL, 0UL, 0UL, 0UL, 67108864UL, 0UL, 0UL, 0UL, 0UL, | |
| | | 134217728UL, 0UL, 0UL, 0UL, 0UL, 268435456UL, 0UL, 0UL, 0UL, 0UL, 536870912 | |
| | | UL, 0UL, 0UL, 0UL, 0UL, 1073741824UL, 0UL, 0UL, 0UL, 0UL, 2147483648UL, 0UL | |
| | | , 0UL, 0UL, 0UL, 0UL, 1UL, 17UL, 0UL, 0UL, 0UL, 2UL, 34UL, 0UL, 0UL, 0UL, 4 | |
| | | UL, 68UL, 0UL, 0UL, 0UL, 8UL, 136UL, 0UL, 0UL, 0UL, 16UL, 272UL, 0UL, 0UL, | |
| | | 0UL, 32UL, 544UL, 0UL, 0UL, 0UL, 64UL, 1088UL, 0UL, 0UL, 0UL, 128UL, 2176UL | |
| | | , 0UL, 0UL, 0UL, 256UL, 4352UL, 0UL, 0UL, 0UL, 512UL, 8704UL, 0UL, 0UL, 0UL | |
| | | , 1024UL, 17408UL, 0UL, 0UL, 0UL, 2048UL, 34816UL, 0UL, 0UL, 0UL, 4096UL, 6 | |
| | | 9632UL, 0UL, 0UL, 0UL, 8192UL, 139264UL, 0UL, 0UL, 0UL, 16384UL, 278528UL, | |
| | | 0UL, 0UL, 0UL, 32768UL, 557056UL, 0UL, 0UL, 0UL, 65536UL, 1114112UL, 0UL, 0 | |
| | | UL, 0UL, 131072UL, 2228224UL, 0UL, 0UL, 0UL, 262144UL, 4456448UL, 0UL, 0UL, | |
| | | 0UL, 524288UL, 8912896UL, 0UL, 0UL, 0UL, 1048576UL, 17825792UL, 0UL, 0UL, | |
| | | 0UL, 2097152UL, 35651584UL, 0UL, 0UL, 0UL, 4194304UL, 71303168UL, 0UL, 0UL, | |
| | | 0UL, 8388608UL, 142606336UL, 0UL, 0UL, 0UL, 16777216UL, 285212672UL, 0UL, | |
| | | 0UL, 0UL, 33554432UL, 570425344UL, 0UL, 0UL, 0UL, 67108864UL, 1140850688UL, | |
| | | 0UL, 0UL, 0UL, 134217728UL, 2281701376UL, 0UL, 0UL, 0UL, 268435456UL, 2684 | |
| | | 35456UL, 0UL, 0UL, 0UL, 536870912UL, 536870912UL, 0UL, 0UL, 0UL, 1073741824 | |
| | | UL, 1073741824UL, 0UL, 0UL, 0UL, 2147483648UL, 2147483648UL, }, {0UL, 3UL, | |
| | | 51UL, 771UL, 13107UL, 0UL, 6UL, 102UL, 1542UL, 26214UL, 0UL, 15UL, 255UL, 3 | |
| | | 855UL, 65535UL, 0UL, 30UL, 510UL, 7710UL, 131070UL, 0UL, 60UL, 1020UL, 1542 | |
| | | 0UL, 262140UL, 0UL, 120UL, 2040UL, 30840UL, 524280UL, 0UL, 240UL, 4080UL, 6 | |
| | | 1680UL, 1048560UL, 0UL, 480UL, 8160UL, 123360UL, 2097120UL, 0UL, 960UL, 163 | |
| | | 20UL, 246720UL, 4194240UL, 0UL, 1920UL, 32640UL, 493440UL, 8388480UL, 0UL, | |
| | | 3840UL, 65280UL, 986880UL, 16776960UL, 0UL, 7680UL, 130560UL, 1973760UL, 33 | |
| | | 553920UL, 0UL, 15360UL, 261120UL, 3947520UL, 67107840UL, 0UL, 30720UL, 5222 | |
| | | 40UL, 7895040UL, 134215680UL, 0UL, 61440UL, 1044480UL, 15790080UL, 26843136 | |
| | | 0UL, 0UL, 122880UL, 2088960UL, 31580160UL, 536862720UL, 0UL, 245760UL, 4177 | |
| | | 920UL, 63160320UL, 1073725440UL, 0UL, 491520UL, 8355840UL, 126320640UL, 214 | |
| | | 7450880UL, 0UL, 983040UL, 16711680UL, 252641280UL, 4294901760UL, 0UL, 19660 | |
| | | 80UL, 33423360UL, 505282560UL, 4294836224UL, 0UL, 3932160UL, 66846720UL, 10 | |
| | | 10565120UL, 4294705152UL, 0UL, 7864320UL, 133693440UL, 2021130240UL, 429444 | |
| | | 3008UL, 0UL, 15728640UL, 267386880UL, 4042260480UL, 4293918720UL, 0UL, 3145 | |
| | | 7280UL, 534773760UL, 3789553664UL, 4292870144UL, 0UL, 62914560UL, 106954752 | |
| | | 0UL, 3284140032UL, 4290772992UL, 0UL, 125829120UL, 2139095040UL, 2273312768 | |
| | | UL, 4286578688UL, 0UL, 251658240UL, 4278190080UL, 251658240UL, 4278190080UL | |
| | | , 0UL, 503316480UL, 4261412864UL, 503316480UL, 4261412864UL, 0UL, 100663296 | |
| | | 0UL, 4227858432UL, 1006632960UL, 4227858432UL, 0UL, 2013265920UL, 416074956 | |
| | | 8UL, 2013265920UL, 4160749568UL, 0UL, 4026531840UL, 4026531840UL, 402653184 | |
| | | 0UL, 4026531840UL, 0UL, 3758096384UL, 3758096384UL, 3758096384UL, 375809638 | |
| | | 4UL, 0UL, 0UL, 3UL, 51UL, 771UL, 0UL, 0UL, 6UL, 102UL, 1542UL, 0UL, 0UL, 15 | |
| | | UL, 255UL, 3855UL, 0UL, 0UL, 30UL, 510UL, 7710UL, 0UL, 0UL, 60UL, 1020UL, 1 | |
| | | 5420UL, 0UL, 0UL, 120UL, 2040UL, 30840UL, 0UL, 0UL, 240UL, 4080UL, 61680UL, | |
| | | 0UL, 0UL, 480UL, 8160UL, 123360UL, 0UL, 0UL, 960UL, 16320UL, 246720UL, 0UL | |
| | | , 0UL, 1920UL, 32640UL, 493440UL, 0UL, 0UL, 3840UL, 65280UL, 986880UL, 0UL, | |
| | | 0UL, 7680UL, 130560UL, 1973760UL, 0UL, 0UL, 15360UL, 261120UL, 3947520UL, | |
| | | 0UL, 0UL, 30720UL, 522240UL, 7895040UL, 0UL, 0UL, 61440UL, 1044480UL, 15790 | |
| | | 080UL, 0UL, 0UL, 122880UL, 2088960UL, 31580160UL, 0UL, 0UL, 245760UL, 41779 | |
| | | 20UL, 63160320UL, 0UL, 0UL, 491520UL, 8355840UL, 126320640UL, 0UL, 0UL, 983 | |
| | | 040UL, 16711680UL, 252641280UL, 0UL, 0UL, 1966080UL, 33423360UL, 505282560U | |
| | | L, 0UL, 0UL, 3932160UL, 66846720UL, 1010565120UL, 0UL, 0UL, 7864320UL, 1336 | |
| | | 93440UL, 2021130240UL, 0UL, 0UL, 15728640UL, 267386880UL, 4042260480UL, 0UL | |
| | | , 0UL, 31457280UL, 534773760UL, 3789553664UL, 0UL, 0UL, 62914560UL, 1069547 | |
| | | 520UL, 3284140032UL, 0UL, 0UL, 125829120UL, 2139095040UL, 2273312768UL, 0UL | |
| | | , 0UL, 251658240UL, 4278190080UL, 251658240UL, 0UL, 0UL, 503316480UL, 42614 | |
| | | 12864UL, 503316480UL, 0UL, 0UL, 1006632960UL, 4227858432UL, 1006632960UL, 0 | |
| | | UL, 0UL, 2013265920UL, 4160749568UL, 2013265920UL, 0UL, 0UL, 4026531840UL, | |
| | | 4026531840UL, 4026531840UL, 0UL, 0UL, 3758096384UL, 3758096384UL, 375809638 | |
| | | 4UL, 0UL, 0UL, 0UL, 3UL, 51UL, 0UL, 0UL, 0UL, 6UL, 102UL, 0UL, 0UL, 0UL, 15 | |
| | | UL, 255UL, 0UL, 0UL, 0UL, 30UL, 510UL, 0UL, 0UL, 0UL, 60UL, 1020UL, 0UL, 0U | |
| | | L, 0UL, 120UL, 2040UL, 0UL, 0UL, 0UL, 240UL, 4080UL, 0UL, 0UL, 0UL, 480UL, | |
| | | 8160UL, 0UL, 0UL, 0UL, 960UL, 16320UL, 0UL, 0UL, 0UL, 1920UL, 32640UL, 0UL, | |
| | | 0UL, 0UL, 3840UL, 65280UL, 0UL, 0UL, 0UL, 7680UL, 130560UL, 0UL, 0UL, 0UL, | |
| | | 15360UL, 261120UL, 0UL, 0UL, 0UL, 30720UL, 522240UL, 0UL, 0UL, 0UL, 61440U | |
| | | L, 1044480UL, 0UL, 0UL, 0UL, 122880UL, 2088960UL, 0UL, 0UL, 0UL, 245760UL, | |
| | | 4177920UL, 0UL, 0UL, 0UL, 491520UL, 8355840UL, 0UL, 0UL, 0UL, 983040UL, 167 | |
| | | 11680UL, 0UL, 0UL, 0UL, 1966080UL, 33423360UL, 0UL, 0UL, 0UL, 3932160UL, 66 | |
| | | 846720UL, 0UL, 0UL, 0UL, 7864320UL, 133693440UL, 0UL, 0UL, 0UL, 15728640UL, | |
| | | 267386880UL, 0UL, 0UL, 0UL, 31457280UL, 534773760UL, 0UL, 0UL, 0UL, 629145 | |
| | | 60UL, 1069547520UL, 0UL, 0UL, 0UL, 125829120UL, 2139095040UL, 0UL, 0UL, 0UL | |
| | | , 251658240UL, 4278190080UL, 0UL, 0UL, 0UL, 503316480UL, 4261412864UL, 0UL, | |
| | | 0UL, 0UL, 1006632960UL, 4227858432UL, 0UL, 0UL, 0UL, 2013265920UL, 4160749 | |
| | | 568UL, 0UL, 0UL, 0UL, 4026531840UL, 4026531840UL, 0UL, 0UL, 0UL, 3758096384 | |
| | | UL, 3758096384UL, 0UL, 0UL, 0UL, 0UL, 3UL, 0UL, 0UL, 0UL, 0UL, 6UL, 0UL, 0U | |
| | | L, 0UL, 0UL, 15UL, 0UL, 0UL, 0UL, 0UL, 30UL, 0UL, 0UL, 0UL, 0UL, 60UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 120UL, 0UL, 0UL, 0UL, 0UL, 240UL, 0UL, 0UL, 0UL, 0UL, 480UL | |
| | | , 0UL, 0UL, 0UL, 0UL, 960UL, 0UL, 0UL, 0UL, 0UL, 1920UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 3840UL, 0UL, 0UL, 0UL, 0UL, 7680UL, 0UL, 0UL, 0UL, 0UL, 15360UL, 0UL, 0UL | |
| | | , 0UL, 0UL, 30720UL, 0UL, 0UL, 0UL, 0UL, 61440UL, 0UL, 0UL, 0UL, 0UL, 12288 | |
| | | 0UL, 0UL, 0UL, 0UL, 0UL, 245760UL, 0UL, 0UL, 0UL, 0UL, 491520UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 983040UL, 0UL, 0UL, 0UL, 0UL, 1966080UL, 0UL, 0UL, 0UL, 0UL, 3932 | |
| | | 160UL, 0UL, 0UL, 0UL, 0UL, 7864320UL, 0UL, 0UL, 0UL, 0UL, 15728640UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 31457280UL, 0UL, 0UL, 0UL, 0UL, 62914560UL, 0UL, 0UL, 0UL, 0 | |
| | | UL, 125829120UL, 0UL, 0UL, 0UL, 0UL, 251658240UL, 0UL, 0UL, 0UL, 0UL, 50331 | |
| | | 6480UL, 0UL, 0UL, 0UL, 0UL, 1006632960UL, 0UL, 0UL, 0UL, 0UL, 2013265920UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 4026531840UL, 0UL, 0UL, 0UL, 0UL, 3758096384UL, 1UL, 1 | |
| | | 7UL, 257UL, 4369UL, 65537UL, 2UL, 34UL, 514UL, 8738UL, 131074UL, 4UL, 68UL, | |
| | | 1028UL, 17476UL, 262148UL, 8UL, 136UL, 2056UL, 34952UL, 524296UL, 16UL, 27 | |
| | | 2UL, 4112UL, 69904UL, 1048592UL, 32UL, 544UL, 8224UL, 139808UL, 2097184UL, | |
| | | 64UL, 1088UL, 16448UL, 279616UL, 4194368UL, 128UL, 2176UL, 32896UL, 559232U | |
| | | L, 8388736UL, 256UL, 4352UL, 65792UL, 1118464UL, 16777472UL, 512UL, 8704UL, | |
| | | 131584UL, 2236928UL, 33554944UL, 1024UL, 17408UL, 263168UL, 4473856UL, 671 | |
| | | 09888UL, 2048UL, 34816UL, 526336UL, 8947712UL, 134219776UL, 4096UL, 69632UL | |
| | | , 1052672UL, 17895424UL, 268439552UL, 8192UL, 139264UL, 2105344UL, 35790848 | |
| | | UL, 536879104UL, 16384UL, 278528UL, 4210688UL, 71581696UL, 1073758208UL, 32 | |
| | | 768UL, 557056UL, 8421376UL, 143163392UL, 2147516416UL, 65536UL, 1114112UL, | |
| | | 16842752UL, 286326784UL, 65536UL, 131072UL, 2228224UL, 33685504UL, 57265356 | |
| | | 8UL, 131072UL, 262144UL, 4456448UL, 67371008UL, 1145307136UL, 262144UL, 524 | |
| | | 288UL, 8912896UL, 134742016UL, 2290614272UL, 524288UL, 1048576UL, 17825792U | |
| | | L, 269484032UL, 286261248UL, 1048576UL, 2097152UL, 35651584UL, 538968064UL, | |
| | | 572522496UL, 2097152UL, 4194304UL, 71303168UL, 1077936128UL, 1145044992UL, | |
| | | 4194304UL, 8388608UL, 142606336UL, 2155872256UL, 2290089984UL, 8388608UL, | |
| | | 16777216UL, 285212672UL, 16777216UL, 285212672UL, 16777216UL, 33554432UL, 5 | |
| | | 70425344UL, 33554432UL, 570425344UL, 33554432UL, 67108864UL, 1140850688UL, | |
| | | 67108864UL, 1140850688UL, 67108864UL, 134217728UL, 2281701376UL, 134217728U | |
| | | L, 2281701376UL, 134217728UL, 268435456UL, 268435456UL, 268435456UL, 268435 | |
| | | 456UL, 268435456UL, 536870912UL, 536870912UL, 536870912UL, 536870912UL, 536 | |
| | | 870912UL, 1073741824UL, 1073741824UL, 1073741824UL, 1073741824UL, 107374182 | |
| | | 4UL, 2147483648UL, 2147483648UL, 2147483648UL, 2147483648UL, 2147483648UL, | |
| | | }, {85009117UL, 335741939UL, 1412632518UL, 386859243UL, 1741437244UL, 15213 | |
| | | 9416UL, 403047142UL, 2556825231UL, 505087203UL, 4287193174UL, 335609039UL, | |
| | | 336528191UL, 1425998811UL, 456920088UL, 2832198590UL, 724748988UL, 36258456 | |
| | | 30UL, 1509824181UL, 3330088197UL, 2710488401UL, 1431742057UL, 1077674236UL, | |
| | | 1140592489UL, 2096905276UL, 3007294393UL, 2863484114UL, 1081606648UL, 1207 | |
| | | 443154UL, 972585080UL, 2793363314UL, 1432000919UL, 1089470704UL, 1341132452 | |
| | | UL, 3019109363UL, 2362285522UL, 1790260014UL, 2178941408UL, 2682264904UL, 1 | |
| | | 743251430UL, 429603751UL, 359294556UL, 62915520UL, 1069562512UL, 3486502860 | |
| | | UL, 859207501UL, 3939814584UL, 125831040UL, 2139125024UL, 2678038424UL, 171 | |
| | | 8415002UL, 363436400UL, 251662080UL, 4278250048UL, 1061109552UL, 3436830004 | |
| | | UL, 3948098272UL, 503324160UL, 4261532800UL, 2122219104UL, 2310257256UL, 38 | |
| | | 0003776UL, 1006648320UL, 4228098304UL, 4244438208UL, 3278337232UL, 39812330 | |
| | | 24UL, 2013296640UL, 4161229312UL, 4193909120UL, 2530142624UL, 446273280UL, | |
| | | 4026593280UL, 4027491328UL, 871625472UL, 4254978880UL, 4113772032UL, 375821 | |
| | | 9264UL, 3760015360UL, 2011686400UL, 3946555008UL, 711351296UL, 3221471232UL | |
| | | , 3225063424UL, 4291808256UL, 108481792UL, 2496444416UL, 2147975168UL, 2155 | |
| | | 159552UL, 4020213760UL, 485399040UL, 3919147008UL, 983040UL, 15351808UL, 25 | |
| | | 5799296UL, 3923588096UL, 322101248UL, 1966080UL, 299139072UL, 511598592UL, | |
| | | 3283773440UL, 3865427968UL, 3932160UL, 4087939072UL, 1023197184UL, 14672732 | |
| | | 16UL, 214663168UL, 7864320UL, 4149346304UL, 2046394368UL, 3202981888UL, 365 | |
| | | 0551808UL, 3236954112UL, 1050935296UL, 871563264UL, 2916302848UL, 193239449 | |
| | | 6UL, 2447376384UL, 1833435136UL, 2011561984UL, 2342944768UL, 643563520UL, 8 | |
| | | 68220928UL, 177209344UL, 4291559424UL, 122486784UL, 2360868864UL, 200487731 | |
| | | 2UL, 85983232UL, 4019716096UL, 3734634496UL, 3647995904UL, 1056964608UL, 36 | |
| | | 61627392UL, 254803968UL, 2905866240UL, 1658847232UL, 2113929216UL, 30282874 | |
| | | 88UL, 3730833408UL, 2322071552UL, 3586129920UL, 4227858432UL, 1761607680UL, | |
| | | 2092957696UL, 80740352UL, 2071986176UL, 4160749568UL, 3523215360UL, 964689 | |
| | | 920UL, 429916160UL, 3875536896UL, 4026531840UL, 2751463424UL, 1929379840UL, | |
| | | 4081057792UL, 503316480UL, 3758096384UL, 2281701376UL, 4127195136UL, 33973 | |
| | | 86240UL, 1316635UL, 85009117UL, 335741939UL, 1412632518UL, 386859243UL, 158 | |
| | | 0547UL, 152139416UL, 403047142UL, 2556825231UL, 505087203UL, 1317672UL, 335 | |
| | | 609039UL, 336528191UL, 1425998811UL, 456920088UL, 1574501UL, 724748988UL, 3 | |
| | | 625845630UL, 1509824181UL, 3330088197UL, 15612UL, 1431742057UL, 1077674236U | |
| | | L, 1140592489UL, 2096905276UL, 31224UL, 2863484114UL, 1081606648UL, 1207443 | |
| | | 154UL, 972585080UL, 62451UL, 1432000919UL, 1089470704UL, 1341132452UL, 3019 | |
| | | 109363UL, 124902UL, 1790260014UL, 2178941408UL, 2682264904UL, 1743251430UL, | |
| | | 249804UL, 359294556UL, 62915520UL, 1069562512UL, 3486502860UL, 499608UL, 3 | |
| | | 939814584UL, 125831040UL, 2139125024UL, 2678038424UL, 999216UL, 363436400UL | |
| | | , 251662080UL, 4278250048UL, 1061109552UL, 3223223904UL, 3948098272UL, 5033 | |
| | | 24160UL, 4261532800UL, 2122219104UL, 1077738688UL, 380003776UL, 1006648320U | |
| | | L, 4228098304UL, 4244438208UL, 1081735552UL, 3981233024UL, 2013296640UL, 41 | |
| | | 61229312UL, 4193909120UL, 1089729280UL, 446273280UL, 4026593280UL, 40274913 | |
| | | 28UL, 871625472UL, 2179458560UL, 4113772032UL, 3758219264UL, 3760015360UL, | |
| | | 2011686400UL, 63949824UL, 711351296UL, 3221471232UL, 3225063424UL, 42918082 | |
| | | 56UL, 127899648UL, 2496444416UL, 2147975168UL, 2155159552UL, 4020213760UL, | |
| | | 255799296UL, 3919147008UL, 983040UL, 15351808UL, 255799296UL, 3732824064UL, | |
| | | 322101248UL, 1966080UL, 299139072UL, 511598592UL, 2096939008UL, 3865427968 | |
| | | UL, 3932160UL, 4087939072UL, 1023197184UL, 972652544UL, 214663168UL, 786432 | |
| | | 0UL, 4149346304UL, 2046394368UL, 3019046912UL, 3650551808UL, 3236954112UL, | |
| | | 1050935296UL, 871563264UL, 1743126528UL, 1932394496UL, 2447376384UL, 183343 | |
| | | 5136UL, 2011561984UL, 3486253056UL, 643563520UL, 868220928UL, 177209344UL, | |
| | | 4291559424UL, 2677538816UL, 2360868864UL, 2004877312UL, 85983232UL, 4019716 | |
| | | 096UL, 1060110336UL, 3647995904UL, 1056964608UL, 3661627392UL, 254803968UL, | |
| | | 3193962496UL, 1658847232UL, 2113929216UL, 3028287488UL, 3730833408UL, 3166 | |
| | | 699520UL, 3586129920UL, 4227858432UL, 1761607680UL, 2092957696UL, 311217356 | |
| | | 8UL, 2071986176UL, 4160749568UL, 3523215360UL, 964689920UL, 1929379840UL, 3 | |
| | | 875536896UL, 4026531840UL, 2751463424UL, 1929379840UL, 4127195136UL, 503316 | |
| | | 480UL, 3758096384UL, 2281701376UL, 4127195136UL, 332854UL, 1316635UL, 85009 | |
| | | 117UL, 335741939UL, 1412632518UL, 596079UL, 1580547UL, 152139416UL, 4030471 | |
| | | 42UL, 2556825231UL, 1316075UL, 1317672UL, 335609039UL, 336528191UL, 1425998 | |
| | | 811UL, 2824661UL, 1574501UL, 724748988UL, 3625845630UL, 1509824181UL, 55714 | |
| | | 97UL, 15612UL, 1431742057UL, 1077674236UL, 1140592489UL, 11142994UL, 31224U | |
| | | L, 2863484114UL, 1081606648UL, 1207443154UL, 22285988UL, 62451UL, 143200091 | |
| | | 9UL, 1089470704UL, 1341132452UL, 44571976UL, 124902UL, 1790260014UL, 217894 | |
| | | 1408UL, 2682264904UL, 89143952UL, 249804UL, 359294556UL, 62915520UL, 106956 | |
| | | 2512UL, 178287904UL, 499608UL, 3939814584UL, 125831040UL, 2139125024UL, 356 | |
| | | 575808UL, 999216UL, 363436400UL, 251662080UL, 4278250048UL, 713151616UL, 32 | |
| | | 23223904UL, 3948098272UL, 503324160UL, 4261532800UL, 1426303232UL, 10777386 | |
| | | 88UL, 380003776UL, 1006648320UL, 4228098304UL, 2852606464UL, 1081735552UL, | |
| | | 3981233024UL, 2013296640UL, 4161229312UL, 1410245632UL, 1089729280UL, 44627 | |
| | | 3280UL, 4026593280UL, 4027491328UL, 1746749440UL, 2179458560UL, 4113772032U | |
| | | L, 3758219264UL, 3760015360UL, 272273408UL, 63949824UL, 711351296UL, 322147 | |
| | | 1232UL, 3225063424UL, 3765772288UL, 127899648UL, 2496444416UL, 2147975168UL | |
| | | , 2155159552UL, 15351808UL, 255799296UL, 3919147008UL, 983040UL, 15351808UL | |
| | | , 3251929088UL, 3732824064UL, 322101248UL, 1966080UL, 299139072UL, 11351490 | |
| | | 56UL, 2096939008UL, 3865427968UL, 3932160UL, 4087939072UL, 1196556288UL, 97 | |
| | | 2652544UL, 214663168UL, 7864320UL, 4149346304UL, 1319370752UL, 3019046912UL | |
| | | , 3650551808UL, 3236954112UL, 1050935296UL, 2638741504UL, 1743126528UL, 193 | |
| | | 2394496UL, 2447376384UL, 1833435136UL, 982515712UL, 3486253056UL, 643563520 | |
| | | UL, 868220928UL, 177209344UL, 1965031424UL, 2677538816UL, 2360868864UL, 200 | |
| | | 4877312UL, 85983232UL, 3930062848UL, 1060110336UL, 3647995904UL, 1056964608 | |
| | | UL, 3661627392UL, 3565158400UL, 3193962496UL, 1658847232UL, 2113929216UL, 3 | |
| | | 028287488UL, 2835349504UL, 3166699520UL, 3586129920UL, 4227858432UL, 176160 | |
| | | 7680UL, 1375731712UL, 3112173568UL, 2071986176UL, 4160749568UL, 3523215360U | |
| | | L, 2751463424UL, 1929379840UL, 3875536896UL, 4026531840UL, 2751463424UL, 22 | |
| | | 81701376UL, 4127195136UL, 503316480UL, 3758096384UL, 2281701376UL, 5123UL, | |
| | | 332854UL, 1316635UL, 85009117UL, 335741939UL, 6150UL, 596079UL, 1580547UL, | |
| | | 152139416UL, 403047142UL, 5135UL, 1316075UL, 1317672UL, 335609039UL, 336528 | |
| | | 191UL, 6174UL, 2824661UL, 1574501UL, 724748988UL, 3625845630UL, 60UL, 55714 | |
| | | 97UL, 15612UL, 1431742057UL, 1077674236UL, 120UL, 11142994UL, 31224UL, 2863 | |
| | | 484114UL, 1081606648UL, 240UL, 22285988UL, 62451UL, 1432000919UL, 108947070 | |
| | | 4UL, 480UL, 44571976UL, 124902UL, 1790260014UL, 2178941408UL, 960UL, 891439 | |
| | | 52UL, 249804UL, 359294556UL, 62915520UL, 1920UL, 178287904UL, 499608UL, 393 | |
| | | 9814584UL, 125831040UL, 3840UL, 356575808UL, 999216UL, 363436400UL, 2516620 | |
| | | 80UL, 7680UL, 713151616UL, 3223223904UL, 3948098272UL, 503324160UL, 15360UL | |
| | | , 1426303232UL, 1077738688UL, 380003776UL, 1006648320UL, 30720UL, 285260646 | |
| | | 4UL, 1081735552UL, 3981233024UL, 2013296640UL, 61440UL, 1410245632UL, 10897 | |
| | | 29280UL, 446273280UL, 4026593280UL, 122880UL, 1746749440UL, 2179458560UL, 4 | |
| | | 113772032UL, 3758219264UL, 245760UL, 272273408UL, 63949824UL, 711351296UL, | |
| | | 3221471232UL, 491520UL, 3765772288UL, 127899648UL, 2496444416UL, 2147975168 | |
| | | UL, 983040UL, 15351808UL, 255799296UL, 3919147008UL, 983040UL, 3223191552UL | |
| | | , 3251929088UL, 3732824064UL, 322101248UL, 1966080UL, 1077673984UL, 1135149 | |
| | | 056UL, 2096939008UL, 3865427968UL, 3932160UL, 1081606144UL, 1196556288UL, 9 | |
| | | 72652544UL, 214663168UL, 7864320UL, 1089470464UL, 1319370752UL, 3019046912U | |
| | | L, 3650551808UL, 3236954112UL, 2178940928UL, 2638741504UL, 1743126528UL, 19 | |
| | | 32394496UL, 2447376384UL, 62914560UL, 982515712UL, 3486253056UL, 643563520U | |
| | | L, 868220928UL, 125829120UL, 1965031424UL, 2677538816UL, 2360868864UL, 2004 | |
| | | 877312UL, 251658240UL, 3930062848UL, 1060110336UL, 3647995904UL, 1056964608 | |
| | | UL, 503316480UL, 3565158400UL, 3193962496UL, 1658847232UL, 2113929216UL, 10 | |
| | | 06632960UL, 2835349504UL, 3166699520UL, 3586129920UL, 4227858432UL, 2013265 | |
| | | 920UL, 1375731712UL, 3112173568UL, 2071986176UL, 4160749568UL, 4026531840UL | |
| | | , 2751463424UL, 1929379840UL, 3875536896UL, 4026531840UL, 3758096384UL, 228 | |
| | | 1701376UL, 4127195136UL, 503316480UL, 3758096384UL, 201392209UL, 3423671362 | |
| | | UL, 218366296UL, 3713336838UL, 206572594UL, 402785186UL, 2552372100UL, 4369 | |
| | | 28947UL, 3130605370UL, 463476848UL, 262468UL, 4461835UL, 68158800UL, 115870 | |
| | | 0908UL, 20971524UL, 524680UL, 8919318UL, 136513955UL, 2316537326UL, 2516585 | |
| | | 2UL, 3222274064UL, 3239051564UL, 3494187077UL, 3558090985UL, 3221225500UL, | |
| | | 2149580832UL, 2183135832UL, 2693406858UL, 2821214674UL, 2147483704UL, 41943 | |
| | | 68UL, 71304368UL, 1091846420UL, 1347462055UL, 64UL, 8388736UL, 142608736UL, | |
| | | 2183692840UL, 2694924110UL, 3221225600UL, 16777472UL, 285217472UL, 7241838 | |
| | | 4UL, 1094880924UL, 1342177536UL, 33554944UL, 570434944UL, 144836768UL, 2189 | |
| | | 761848UL, 2684355072UL, 67109888UL, 1140869888UL, 289673536UL, 84556400UL, | |
| | | 1073742848UL, 134219776UL, 2281739776UL, 579347072UL, 169112800UL, 21474856 | |
| | | 96UL, 268439552UL, 268512256UL, 1158694144UL, 69790144UL, 4096UL, 536879104 | |
| | | UL, 537024512UL, 2317388288UL, 3360805760UL, 8192UL, 1073758208UL, 10740490 | |
| | | 24UL, 339809280UL, 1352902400UL, 16384UL, 2147516416UL, 2148098048UL, 39008 | |
| | | 44032UL, 1632062976UL, 32768UL, 65536UL, 1228800UL, 17059840UL, 311335936UL | |
| | | , 65536UL, 131072UL, 2457600UL, 34119680UL, 622671872UL, 131072UL, 262144UL | |
| | | , 4915200UL, 68239360UL, 1245343744UL, 262144UL, 524288UL, 9830400UL, 13647 | |
| | | 8720UL, 2490687488UL, 524288UL, 1048576UL, 288096256UL, 272957440UL, 954843 | |
| | | 136UL, 3222274048UL, 2097152UL, 3797417984UL, 545914880UL, 2983428096UL, 21 | |
| | | 49580800UL, 4194304UL, 78643200UL, 1091829760UL, 2745630720UL, 4194304UL, 3 | |
| | | 229614080UL, 3378511872UL, 1109917696UL, 2270035968UL, 8388608UL, 135895449 | |
| | | 6UL, 1119879168UL, 1414529024UL, 513540096UL, 16777216UL, 2717908992UL, 223 | |
| | | 9758336UL, 2829058048UL, 1027080192UL, 33554432UL, 1140850688UL, 184549376U | |
| | | L, 1363148800UL, 2054160384UL, 3288334336UL, 2281701376UL, 369098752UL, 272 | |
| | | 6297600UL, 4108320768UL, 2281701376UL, 268435456UL, 738197504UL, 2231369728 | |
| | | UL, 968884224UL, 3959422976UL, 536870912UL, 1476395008UL, 167772160UL, 3011 | |
| | | 510272UL, 3355443200UL, 1073741824UL, 2952790016UL, 335544320UL, 1728053248 | |
| | | UL, 2147483648UL, 2147483648UL, 1610612736UL, 3892314112UL, 503316480UL, 0U | |
| | | L, }, {1939838472UL, 1412147404UL, 166205219UL, 1757484276UL, 2905930693UL, | |
| | | 2345662040UL, 2845657161UL, 253454719UL, 2661974169UL, 303781080UL, 407533 | |
| | | 1504UL, 31014156UL, 244538930UL, 3752264221UL, 992575155UL, 219309525UL, 24 | |
| | | 6620060UL, 215640989UL, 4125020723UL, 2016731730UL, 3236558869UL, 297169276 | |
| | | UL, 3293566751UL, 1867504216UL, 210423272UL, 2531663658UL, 499723753UL, 173 | |
| | | 0625896UL, 189236880UL, 3388575408UL, 2433358422UL, 1368961148UL, 313409684 | |
| | | 8UL, 2827836415UL, 3888822753UL, 4172043647UL, 3379360748UL, 2651760955UL, | |
| | | 1345081091UL, 627692776UL, 189423917UL, 1927379456UL, 4004336944UL, 2995932 | |
| | | 065UL, 1882016234UL, 2551113616UL, 1576396048UL, 1299792730UL, 2151240795UL | |
| | | , 2154814108UL, 4292139924UL, 3555849728UL, 943986992UL, 3169912733UL, 2631 | |
| | | 635779UL, 3478094562UL, 1285558544UL, 3716074330UL, 2780749859UL, 391110651 | |
| | | 0UL, 4175656994UL, 1731832828UL, 1275401375UL, 937322456UL, 3802094750UL, 1 | |
| | | 145506936UL, 1008905193UL, 1718801768UL, 645739137UL, 1356219146UL, 8278868 | |
| | | 16UL, 1722154800UL, 2242776733UL, 754630810UL, 772070504UL, 249481170UL, 26 | |
| | | 08123425UL, 2087201889UL, 3200968096UL, 3292110026UL, 841433255UL, 47754342 | |
| | | 7UL, 1878882709UL, 705347364UL, 4003860146UL, 3194913138UL, 2616490007UL, 3 | |
| | | 57561212UL, 2446098297UL, 2955680594UL, 2512991743UL, 637464579UL, 12091324 | |
| | | 55UL, 1341312804UL, 612108672UL, 2455017713UL, 1749147666UL, 4020226825UL, | |
| | | 2873924220UL, 499405095UL, 1837614076UL, 1227604028UL, 714577577UL, 1659502 | |
| | | 08UL, 442290261UL, 489077752UL, 216760440UL, 42151250UL, 426862080UL, 28102 | |
| | | 42474UL, 4112075489UL, 3514761468UL, 4101921371UL, 982512636UL, 500792667UL | |
| | | , 4286077681UL, 198050301UL, 1858712743UL, 2913642493UL, 3547545255UL, 3981 | |
| | | 929169UL, 2944140287UL, 2286578015UL, 3422343167UL, 1239123295UL, 202636739 | |
| | | 4UL, 3269986302UL, 3028402878UL, 2709637886UL, 1096011710UL, 294584132UL, 3 | |
| | | 086749695UL, 3324400975UL, 1164394495UL, 4290155855UL, 543687304UL, 4008517 | |
| | | 630UL, 836370334UL, 1876426750UL, 2362048414UL, 3578325264UL, 3221487612UL, | |
| | | 2671154748UL, 3395518460UL, 2018383420UL, 2131029536UL, 2165829624UL, 6976 | |
| | | 61816UL, 1336049656UL, 3309365624UL, 4259639360UL, 3423548400UL, 2416417776 | |
| | | UL, 1633698800UL, 1630071792UL, 41950336UL, 3423478496UL, 2885608160UL, 394 | |
| | | 3744224UL, 677380832UL, 4179285363UL, 1939838472UL, 1412147404UL, 166205219 | |
| | | UL, 1757484276UL, 3838244595UL, 2345662040UL, 2845657161UL, 253454719UL, 26 | |
| | | 61974169UL, 138737288UL, 4075331504UL, 31014156UL, 244538930UL, 3752264221U | |
| | | L, 1503392345UL, 219309525UL, 246620060UL, 215640989UL, 4125020723UL, 17594 | |
| | | 81152UL, 3236558869UL, 297169276UL, 3293566751UL, 1867504216UL, 3898070400U | |
| | | L, 2531663658UL, 499723753UL, 1730625896UL, 189236880UL, 2610231010UL, 2433 | |
| | | 358422UL, 1368961148UL, 3134096848UL, 2827836415UL, 3903474593UL, 417204364 | |
| | | 7UL, 3379360748UL, 2651760955UL, 1345081091UL, 1267864331UL, 189423917UL, 1 | |
| | | 927379456UL, 4004336944UL, 2995932065UL, 3452816347UL, 2551113616UL, 157639 | |
| | | 6048UL, 1299792730UL, 2151240795UL, 1222520631UL, 4292139924UL, 3555849728U | |
| | | L, 943986992UL, 3169912733UL, 3260130211UL, 3478094562UL, 1285558544UL, 371 | |
| | | 6074330UL, 2780749859UL, 3039362306UL, 4175656994UL, 1731832828UL, 12754013 | |
| | | 75UL, 937322456UL, 3236754932UL, 1145506936UL, 1008905193UL, 1718801768UL, | |
| | | 645739137UL, 1358079399UL, 827886816UL, 1722154800UL, 2242776733UL, 7546308 | |
| | | 10UL, 1748663943UL, 249481170UL, 2608123425UL, 2087201889UL, 3200968096UL, | |
| | | 698076610UL, 841433255UL, 477543427UL, 1878882709UL, 705347364UL, 369279499 | |
| | | 6UL, 3194913138UL, 2616490007UL, 357561212UL, 2446098297UL, 2771068186UL, 2 | |
| | | 512991743UL, 637464579UL, 1209132455UL, 1341312804UL, 27937268UL, 245501771 | |
| | | 3UL, 1749147666UL, 4020226825UL, 2873924220UL, 1673040956UL, 1837614076UL, | |
| | | 1227604028UL, 714577577UL, 165950208UL, 528340088UL, 489077752UL, 216760440 | |
| | | UL, 42151250UL, 426862080UL, 1646215396UL, 4112075489UL, 3514761468UL, 4101 | |
| | | 921371UL, 982512636UL, 2095821304UL, 4286077681UL, 198050301UL, 1858712743U | |
| | | L, 2913642493UL, 277300160UL, 3981929169UL, 2944140287UL, 2286578015UL, 342 | |
| | | 2343167UL, 1178044288UL, 2026367394UL, 3269986302UL, 3028402878UL, 27096378 | |
| | | 86UL, 2234191616UL, 294584132UL, 3086749695UL, 3324400975UL, 1164394495UL, | |
| | | 136978944UL, 543687304UL, 4008517630UL, 836370334UL, 1876426750UL, 32752537 | |
| | | 60UL, 3578325264UL, 3221487612UL, 2671154748UL, 3395518460UL, 3942394880UL, | |
| | | 2131029536UL, 2165829624UL, 697661816UL, 1336049656UL, 3265045504UL, 42596 | |
| | | 39360UL, 3423548400UL, 2416417776UL, 1633698800UL, 3943712768UL, 41950336UL | |
| | | , 3423478496UL, 2885608160UL, 3943744224UL, 2293593009UL, 4179285363UL, 193 | |
| | | 9838472UL, 1412147404UL, 166205219UL, 715714152UL, 3838244595UL, 2345662040 | |
| | | UL, 2845657161UL, 253454719UL, 3758048260UL, 138737288UL, 4075331504UL, 310 | |
| | | 14156UL, 244538930UL, 370671650UL, 1503392345UL, 219309525UL, 246620060UL, | |
| | | 215640989UL, 2219162331UL, 1759481152UL, 3236558869UL, 297169276UL, 3293566 | |
| | | 751UL, 135243402UL, 3898070400UL, 2531663658UL, 499723753UL, 1730625896UL, | |
| | | 3142293713UL, 2610231010UL, 2433358422UL, 1368961148UL, 3134096848UL, 48694 | |
| | | 9791UL, 3903474593UL, 4172043647UL, 3379360748UL, 2651760955UL, 3172880550U | |
| | | L, 1267864331UL, 189423917UL, 1927379456UL, 4004336944UL, 191463910UL, 3452 | |
| | | 816347UL, 2551113616UL, 1576396048UL, 1299792730UL, 4411574UL, 1222520631UL | |
| | | , 4292139924UL, 3555849728UL, 943986992UL, 3073348038UL, 3260130211UL, 3478 | |
| | | 094562UL, 1285558544UL, 3716074330UL, 3098363790UL, 3039362306UL, 417565699 | |
| | | 4UL, 1731832828UL, 1275401375UL, 468159532UL, 3236754932UL, 1145506936UL, 1 | |
| | | 008905193UL, 1718801768UL, 1092964081UL, 1358079399UL, 827886816UL, 1722154 | |
| | | 800UL, 2242776733UL, 53128947UL, 1748663943UL, 249481170UL, 2608123425UL, 2 | |
| | | 087201889UL, 1960144614UL, 698076610UL, 841433255UL, 477543427UL, 187888270 | |
| | | 9UL, 1505419004UL, 3692794996UL, 3194913138UL, 2616490007UL, 357561212UL, 2 | |
| | | 823143358UL, 2771068186UL, 2512991743UL, 637464579UL, 1209132455UL, 1991737 | |
| | | 212UL, 27937268UL, 2455017713UL, 1749147666UL, 4020226825UL, 2907896812UL, | |
| | | 1673040956UL, 1837614076UL, 1227604028UL, 714577577UL, 3633969112UL, 528340 | |
| | | 088UL, 489077752UL, 216760440UL, 42151250UL, 2886728356UL, 1646215396UL, 41 | |
| | | 12075489UL, 3514761468UL, 4101921371UL, 3507686008UL, 2095821304UL, 4286077 | |
| | | 681UL, 198050301UL, 1858712743UL, 1463806912UL, 277300160UL, 3981929169UL, | |
| | | 2944140287UL, 2286578015UL, 4137888640UL, 1178044288UL, 2026367394UL, 32699 | |
| | | 86302UL, 3028402878UL, 1276820224UL, 2234191616UL, 294584132UL, 3086749695U | |
| | | L, 3324400975UL, 4274031104UL, 136978944UL, 543687304UL, 4008517630UL, 8363 | |
| | | 70334UL, 2978609152UL, 3275253760UL, 3578325264UL, 3221487612UL, 2671154748 | |
| | | UL, 2296777728UL, 3942394880UL, 2131029536UL, 2165829624UL, 697661816UL, 10 | |
| | | 86645248UL, 3265045504UL, 4259639360UL, 3423548400UL, 2416417776UL, 2295121 | |
| | | 920UL, 3943712768UL, 41950336UL, 3423478496UL, 2885608160UL, 3290486993UL, | |
| | | 2293593009UL, 4179285363UL, 1939838472UL, 1412147404UL, 3718742914UL, 71571 | |
| | | 4152UL, 3838244595UL, 2345662040UL, 2845657161UL, 3251034248UL, 3758048260U | |
| | | L, 138737288UL, 4075331504UL, 31014156UL, 2257801369UL, 370671650UL, 150339 | |
| | | 2345UL, 219309525UL, 246620060UL, 1375177854UL, 2219162331UL, 1759481152UL, | |
| | | 3236558869UL, 297169276UL, 2981812236UL, 135243402UL, 3898070400UL, 253166 | |
| | | 3658UL, 499723753UL, 1103465850UL, 3142293713UL, 2610231010UL, 2433358422UL | |
| | | , 1368961148UL, 2570001060UL, 486949791UL, 3903474593UL, 4172043647UL, 3379 | |
| | | 360748UL, 1922171925UL, 3172880550UL, 1267864331UL, 189423917UL, 1927379456 | |
| | | UL, 1359812359UL, 191463910UL, 3452816347UL, 2551113616UL, 1576396048UL, 25 | |
| | | 18549525UL, 4411574UL, 1222520631UL, 4292139924UL, 3555849728UL, 949028615U | |
| | | L, 3073348038UL, 3260130211UL, 3478094562UL, 1285558544UL, 4113039486UL, 30 | |
| | | 98363790UL, 3039362306UL, 4175656994UL, 1731832828UL, 1827471372UL, 4681595 | |
| | | 32UL, 3236754932UL, 1145506936UL, 1008905193UL, 1626341859UL, 1092964081UL, | |
| | | 1358079399UL, 827886816UL, 1722154800UL, 1069547583UL, 53128947UL, 1748663 | |
| | | 943UL, 249481170UL, 2608123425UL, 3162506114UL, 1960144614UL, 698076610UL, | |
| | | 841433255UL, 477543427UL, 3641706484UL, 1505419004UL, 3692794996UL, 3194913 | |
| | | 138UL, 2616490007UL, 3623882586UL, 2823143358UL, 2771068186UL, 2512991743UL | |
| | | , 637464579UL, 16785012UL, 1991737212UL, 27937268UL, 2455017713UL, 17491476 | |
| | | 66UL, 2348825660UL, 2907896812UL, 1673040956UL, 1837614076UL, 1227604028UL, | |
| | | 2579527800UL, 3633969112UL, 528340088UL, 489077752UL, 216760440UL, 3628134 | |
| | | 628UL, 2886728356UL, 1646215396UL, 4112075489UL, 3514761468UL, 1602085368UL | |
| | | , 3507686008UL, 2095821304UL, 4286077681UL, 198050301UL, 2501362624UL, 1463 | |
| | | 806912UL, 277300160UL, 3981929169UL, 2944140287UL, 4112467840UL, 4137888640 | |
| | | UL, 1178044288UL, 2026367394UL, 3269986302UL, 3356184320UL, 1276820224UL, 2 | |
| | | 234191616UL, 294584132UL, 3086749695UL, 366387712UL, 4274031104UL, 13697894 | |
| | | 4UL, 543687304UL, 4008517630UL, 1006135296UL, 2978609152UL, 3275253760UL, 3 | |
| | | 578325264UL, 3221487612UL, 3104844800UL, 2296777728UL, 3942394880UL, 213102 | |
| | | 9536UL, 2165829624UL, 1874371584UL, 1086645248UL, 3265045504UL, 4259639360U | |
| | | L, 3423548400UL, 2975352832UL, 2295121920UL, 3943712768UL, 41950336UL, 3423 | |
| | | 478496UL, 989898496UL, 3410688577UL, 2331788830UL, 3546482013UL, 813828841U | |
| | | L, 1865093068UL, 3265457506UL, 3795669738UL, 2119696024UL, 4285651426UL, 33 | |
| | | 33834629UL, 3451487261UL, 2090324595UL, 1816963648UL, 932961512UL, 24707610 | |
| | | 29UL, 3401764108UL, 3421619354UL, 4199624502UL, 589386372UL, 879396240UL, 3 | |
| | | 372470254UL, 2693109296UL, 2424215996UL, 38442268UL, 1882087724UL, 17139760 | |
| | | 0UL, 2024561281UL, 183095586UL, 3282207272UL, 3402177296UL, 1859195498UL, 4 | |
| | | 13109947UL, 2839537944UL, 1632143648UL, 3742715856UL, 388696500UL, 17487037 | |
| | | 33UL, 3563198567UL, 3826785440UL, 2896086528UL, 3989037829UL, 1478787788UL, | |
| | | 1390277813UL, 2123320736UL, 3416516800UL, 2056564203UL, 2584895011UL, 1605 | |
| | | 192736UL, 2475623616UL, 3856499712UL, 3439657984UL, 708088129UL, 1501395566 | |
| | | UL, 1302184960UL, 1360092352UL, 1645630430UL, 1425230387UL, 3369488824UL, 2 | |
| | | 979863936UL, 869212432UL, 150548847UL, 1097557362UL, 655939640UL, 316553344 | |
| | | UL, 3761918508UL, 3958338094UL, 141744600UL, 1412214640UL, 1859689984UL, 32 | |
| | | 00680981UL, 3883058679UL, 999801880UL, 3946079738UL, 1876072704UL, 19438184 | |
| | | 9UL, 2177533995UL, 1584707624UL, 3053768410UL, 2593051904UL, 3458076673UL, | |
| | | 4047442835UL, 3545972808UL, 3441793178UL, 194975744UL, 1731731470UL, 416875 | |
| | | 5162UL, 2628944732UL, 2125675784UL, 3119906816UL, 960774145UL, 2646626078UL | |
| | | , 2152793157UL, 3049156634UL, 672464896UL, 3046932493UL, 3700727536UL, 2152 | |
| | | 335477UL, 575986696UL, 671940608UL, 2208366608UL, 1454456125UL, 937760016UL | |
| | | , 4103979069UL, 2737668096UL, 1179779104UL, 1030912634UL, 1041902112UL, 203 | |
| | | 2909434UL, 2274230272UL, 2089025605UL, 3050632421UL, 2428784965UL, 14065814 | |
| | | 9UL, 4254138368UL, 1745354889UL, 711584249UL, 2746523017UL, 2551006457UL, 1 | |
| | | 100808192UL, 1494221073UL, 3422999489UL, 2696954129UL, 976716737UL, 2653421 | |
| | | 568UL, 3806331426UL, 3690047362UL, 1481392674UL, 3817015170UL, 2353004544UL | |
| | | , 286262340UL, 2300534532UL, 4206449732UL, 15339268UL, 2894069760UL, 488376 | |
| | | 456UL, 1489927688UL, 1196583048UL, 652746248UL, 2214592512UL, 69904UL, 1006 | |
| | | 205200UL, 2322628880UL, 1229515024UL, 2617245696UL, 3423527456UL, 196495312 | |
| | | 0UL, 4260938272UL, 386199072UL, 1744830464UL, 1342444608UL, 1069330496UL, 2 | |
| | | 138592320UL, 3185897536UL, 1073741824UL, 1342493824UL, 3780942976UL, 177106 | |
| | | 6496UL, 2189433984UL, 2147483648UL, }, {1804684571UL, 2106089606UL, 1533056 | |
| | | 158UL, 2870216110UL, 3618155659UL, 3789871366UL, 4246691682UL, 3667072763UL | |
| | | , 1212241769UL, 3152390668UL, 2973497449UL, 2958641966UL, 2088805328UL, 717 | |
| | | 518631UL, 2401090860UL, 3606967204UL, 952637656UL, 59827581UL, 1291486682UL | |
| | | , 1499453515UL, 2053994857UL, 563998083UL, 4094000396UL, 1163546899UL, 1003 | |
| | | 843565UL, 654565639UL, 1070907026UL, 4217851863UL, 426034251UL, 1721352737U | |
| | | L, 278404469UL, 3899800390UL, 1063362170UL, 1162348262UL, 3153545093UL, 324 | |
| | | 9996223UL, 186674553UL, 2616406148UL, 3137968354UL, 1282784965UL, 149506805 | |
| | | 8UL, 3033760361UL, 2278144523UL, 3192245769UL, 719586342UL, 2602548287UL, 3 | |
| | | 386583150UL, 355354345UL, 3252815848UL, 2178056037UL, 2283016801UL, 3005955 | |
| | | 037UL, 3340254490UL, 802791670UL, 251122316UL, 3705188626UL, 1252262272UL, | |
| | | 3989036796UL, 3527490452UL, 2047131255UL, 1447170583UL, 3373930285UL, 28950 | |
| | | 37457UL, 209341805UL, 1820357643UL, 3712392731UL, 685796521UL, 1322920440UL | |
| | | , 814388470UL, 1357857147UL, 434430265UL, 2650681935UL, 1371566728UL, 58783 | |
| | | 716UL, 2273435933UL, 3498513198UL, 792571900UL, 1447808772UL, 3513385860UL, | |
| | | 99175889UL, 1105434360UL, 1484146625UL, 3327194068UL, 242672513UL, 3552105 | |
| | | 593UL, 1425844616UL, 2871928454UL, 1124633561UL, 607610433UL, 2130018608UL, | |
| | | 1610235673UL, 2844230432UL, 2748082340UL, 994392866UL, 450823250UL, 291253 | |
| | | 5126UL, 2574390988UL, 3974009252UL, 78696582UL, 649682891UL, 3980917176UL, | |
| | | 3221419689UL, 960695436UL, 729221508UL, 358358845UL, 3392407691UL, 47271100 | |
| | | 5UL, 295914899UL, 3005191796UL, 3078521977UL, 3370011868UL, 509135340UL, 19 | |
| | | 65939519UL, 2086465877UL, 2457949822UL, 1324152522UL, 762289386UL, 36186939 | |
| | | 97UL, 233730715UL, 2873984650UL, 31168606UL, 3367142977UL, 2851851305UL, 32 | |
| | | 51660053UL, 4209768406UL, 3298190175UL, 901235185UL, 1564391510UL, 23526865 | |
| | | 27UL, 1008150482UL, 578573310UL, 3462447127UL, 2482873876UL, 1790221257UL, | |
| | | 2255375608UL, 2335345651UL, 1381450613UL, 2866805101UL, 1495073163UL, 51990 | |
| | | 5259UL, 3184556473UL, 1076378339UL, 2692926127UL, 970097715UL, 4013407916UL | |
| | | , 4014350363UL, 2476927059UL, 1989070516UL, 2640060069UL, 1987784589UL, 188 | |
| | | 0989003UL, 3861138803UL, 451743296UL, 1987067871UL, 1975657871UL, 339781688 | |
| | | 2UL, 2309900530UL, 4108425851UL, 4063867233UL, 3319482186UL, 2621772886UL, | |
| | | 1804684571UL, 2106089606UL, 1533056158UL, 2870216110UL, 611557097UL, 378987 | |
| | | 1366UL, 4246691682UL, 3667072763UL, 1212241769UL, 3389551988UL, 2973497449U | |
| | | L, 2958641966UL, 2088805328UL, 717518631UL, 2460955430UL, 3606967204UL, 952 | |
| | | 637656UL, 59827581UL, 1291486682UL, 3531087304UL, 2053994857UL, 563998083UL | |
| | | , 4094000396UL, 1163546899UL, 1242934125UL, 654565639UL, 1070907026UL, 4217 | |
| | | 851863UL, 426034251UL, 3034416129UL, 278404469UL, 3899800390UL, 1063362170U | |
| | | L, 1162348262UL, 4258714417UL, 3249996223UL, 186674553UL, 2616406148UL, 313 | |
| | | 7968354UL, 639885806UL, 1495068058UL, 3033760361UL, 2278144523UL, 319224576 | |
| | | 9UL, 4159910300UL, 2602548287UL, 3386583150UL, 355354345UL, 3252815848UL, 1 | |
| | | 555885880UL, 2283016801UL, 3005955037UL, 3340254490UL, 802791670UL, 2948774 | |
| | | 612UL, 3705188626UL, 1252262272UL, 3989036796UL, 3527490452UL, 2107826711UL | |
| | | , 1447170583UL, 3373930285UL, 2895037457UL, 209341805UL, 3763367196UL, 3712 | |
| | | 392731UL, 685796521UL, 1322920440UL, 814388470UL, 1986168339UL, 434430265UL | |
| | | , 2650681935UL, 1371566728UL, 58783716UL, 1423189187UL, 3498513198UL, 79257 | |
| | | 1900UL, 1447808772UL, 3513385860UL, 315969823UL, 1105434360UL, 1484146625UL | |
| | | , 3327194068UL, 242672513UL, 3336228275UL, 1425844616UL, 2871928454UL, 1124 | |
| | | 633561UL, 607610433UL, 1762052458UL, 1610235673UL, 2844230432UL, 2748082340 | |
| | | UL, 994392866UL, 3771702243UL, 2912535126UL, 2574390988UL, 3974009252UL, 78 | |
| | | 696582UL, 1626628844UL, 3980917176UL, 3221419689UL, 960695436UL, 729221508U | |
| | | L, 382092233UL, 3392407691UL, 472711005UL, 295914899UL, 3005191796UL, 51429 | |
| | | 7204UL, 3370011868UL, 509135340UL, 1965939519UL, 2086465877UL, 3975975091UL | |
| | | , 1324152522UL, 762289386UL, 3618693997UL, 233730715UL, 455322516UL, 311686 | |
| | | 06UL, 3367142977UL, 2851851305UL, 3251660053UL, 3952189603UL, 3298190175UL, | |
| | | 901235185UL, 1564391510UL, 2352686527UL, 826181452UL, 578573310UL, 3462447 | |
| | | 127UL, 2482873876UL, 1790221257UL, 1529242773UL, 2335345651UL, 1381450613UL | |
| | | , 2866805101UL, 1495073163UL, 877718651UL, 3184556473UL, 1076378339UL, 2692 | |
| | | 926127UL, 970097715UL, 299344245UL, 4014350363UL, 2476927059UL, 1989070516U | |
| | | L, 2640060069UL, 3844531327UL, 1880989003UL, 3861138803UL, 451743296UL, 198 | |
| | | 7067871UL, 3272848161UL, 3397816882UL, 2309900530UL, 4108425851UL, 40638672 | |
| | | 33UL, 834288064UL, 2621772886UL, 1804684571UL, 2106089606UL, 1533056158UL, | |
| | | 304865970UL, 611557097UL, 3789871366UL, 4246691682UL, 3667072763UL, 2728206 | |
| | | 193UL, 3389551988UL, 2973497449UL, 2958641966UL, 2088805328UL, 3895037582UL | |
| | | , 2460955430UL, 3606967204UL, 952637656UL, 59827581UL, 2349212526UL, 353108 | |
| | | 7304UL, 2053994857UL, 563998083UL, 4094000396UL, 4028900485UL, 1242934125UL | |
| | | , 654565639UL, 1070907026UL, 4217851863UL, 1663452176UL, 3034416129UL, 2784 | |
| | | 04469UL, 3899800390UL, 1063362170UL, 2721441405UL, 4258714417UL, 3249996223 | |
| | | UL, 186674553UL, 2616406148UL, 4228837490UL, 639885806UL, 1495068058UL, 303 | |
| | | 3760361UL, 2278144523UL, 2820661772UL, 4159910300UL, 2602548287UL, 33865831 | |
| | | 50UL, 355354345UL, 1815256314UL, 1555885880UL, 2283016801UL, 3005955037UL, | |
| | | 3340254490UL, 2166514144UL, 2948774612UL, 3705188626UL, 1252262272UL, 39890 | |
| | | 36796UL, 751187322UL, 2107826711UL, 1447170583UL, 3373930285UL, 2895037457U | |
| | | L, 2809311944UL, 3763367196UL, 3712392731UL, 685796521UL, 1322920440UL, 936 | |
| | | 300677UL, 1986168339UL, 434430265UL, 2650681935UL, 1371566728UL, 1308015359 | |
| | | UL, 1423189187UL, 3498513198UL, 792571900UL, 1447808772UL, 3065349526UL, 31 | |
| | | 5969823UL, 1105434360UL, 1484146625UL, 3327194068UL, 1038676789UL, 33362282 | |
| | | 75UL, 1425844616UL, 2871928454UL, 1124633561UL, 2956422231UL, 1762052458UL, | |
| | | 1610235673UL, 2844230432UL, 2748082340UL, 3603862093UL, 3771702243UL, 2912 | |
| | | 535126UL, 2574390988UL, 3974009252UL, 1691332448UL, 1626628844UL, 398091717 | |
| | | 6UL, 3221419689UL, 960695436UL, 3120142427UL, 382092233UL, 3392407691UL, 47 | |
| | | 2711005UL, 295914899UL, 4101686983UL, 514297204UL, 3370011868UL, 509135340U | |
| | | L, 1965939519UL, 3015736706UL, 3975975091UL, 1324152522UL, 762289386UL, 361 | |
| | | 8693997UL, 2395097989UL, 455322516UL, 31168606UL, 3367142977UL, 2851851305U | |
| | | L, 30511955UL, 3952189603UL, 3298190175UL, 901235185UL, 1564391510UL, 26062 | |
| | | 98633UL, 826181452UL, 578573310UL, 3462447127UL, 2482873876UL, 4159642946UL | |
| | | , 1529242773UL, 2335345651UL, 1381450613UL, 2866805101UL, 1782913669UL, 877 | |
| | | 718651UL, 3184556473UL, 1076378339UL, 2692926127UL, 1730328819UL, 299344245 | |
| | | UL, 4014350363UL, 2476927059UL, 1989070516UL, 1425685614UL, 3844531327UL, 1 | |
| | | 880989003UL, 3861138803UL, 451743296UL, 889237383UL, 3272848161UL, 33978168 | |
| | | 82UL, 2309900530UL, 4108425851UL, 1155723231UL, 834288064UL, 2621772886UL, | |
| | | 1804684571UL, 2106089606UL, 2387009004UL, 304865970UL, 611557097UL, 3789871 | |
| | | 366UL, 4246691682UL, 1405709661UL, 2728206193UL, 3389551988UL, 2973497449UL | |
| | | , 2958641966UL, 3183906006UL, 3895037582UL, 2460955430UL, 3606967204UL, 952 | |
| | | 637656UL, 1345432763UL, 2349212526UL, 3531087304UL, 2053994857UL, 563998083 | |
| | | UL, 3749011414UL, 4028900485UL, 1242934125UL, 654565639UL, 1070907026UL, 10 | |
| | | 72342672UL, 1663452176UL, 3034416129UL, 278404469UL, 3899800390UL, 35666521 | |
| | | 88UL, 2721441405UL, 4258714417UL, 3249996223UL, 186674553UL, 4001263143UL, | |
| | | 4228837490UL, 639885806UL, 1495068058UL, 3033760361UL, 4278332644UL, 282066 | |
| | | 1772UL, 4159910300UL, 2602548287UL, 3386583150UL, 838831089UL, 1815256314UL | |
| | | , 1555885880UL, 2283016801UL, 3005955037UL, 3377397178UL, 2166514144UL, 294 | |
| | | 8774612UL, 3705188626UL, 1252262272UL, 2414422575UL, 751187322UL, 210782671 | |
| | | 1UL, 1447170583UL, 3373930285UL, 1253755033UL, 2809311944UL, 3763367196UL, | |
| | | 3712392731UL, 685796521UL, 3238624475UL, 936300677UL, 1986168339UL, 4344302 | |
| | | 65UL, 2650681935UL, 1642290570UL, 1308015359UL, 1423189187UL, 3498513198UL, | |
| | | 792571900UL, 173318140UL, 3065349526UL, 315969823UL, 1105434360UL, 1484146 | |
| | | 625UL, 4103797777UL, 1038676789UL, 3336228275UL, 1425844616UL, 2871928454UL | |
| | | , 1797745765UL, 2956422231UL, 1762052458UL, 1610235673UL, 2844230432UL, 218 | |
| | | 0656608UL, 3603862093UL, 3771702243UL, 2912535126UL, 2574390988UL, 11830983 | |
| | | 90UL, 1691332448UL, 1626628844UL, 3980917176UL, 3221419689UL, 2645203959UL, | |
| | | 3120142427UL, 382092233UL, 3392407691UL, 472711005UL, 1659659070UL, 410168 | |
| | | 6983UL, 514297204UL, 3370011868UL, 509135340UL, 483888155UL, 3015736706UL, | |
| | | 3975975091UL, 1324152522UL, 762289386UL, 1259948064UL, 2395097989UL, 455322 | |
| | | 516UL, 31168606UL, 3367142977UL, 339990414UL, 30511955UL, 3952189603UL, 329 | |
| | | 8190175UL, 901235185UL, 3097920065UL, 2606298633UL, 826181452UL, 578573310U | |
| | | L, 3462447127UL, 1548039839UL, 4159642946UL, 1529242773UL, 2335345651UL, 13 | |
| | | 81450613UL, 2173079994UL, 1782913669UL, 877718651UL, 3184556473UL, 10763783 | |
| | | 39UL, 1570275057UL, 1730328819UL, 299344245UL, 4014350363UL, 2476927059UL, | |
| | | 1845882881UL, 1425685614UL, 3844531327UL, 1880989003UL, 3861138803UL, 13224 | |
| | | 09081UL, 889237383UL, 3272848161UL, 3397816882UL, 2309900530UL, 3505447982U | |
| | | L, 3430136873UL, 1319796589UL, 4202423979UL, 3184732284UL, 2910356648UL, 25 | |
| | | 34615223UL, 3854465731UL, 768821792UL, 2205052576UL, 1348983754UL, 13002501 | |
| | | 88UL, 2919181738UL, 2520178732UL, 3967243685UL, 2646012002UL, 1784678658UL, | |
| | | 741302051UL, 3464753547UL, 194213376UL, 1482799064UL, 3009673860UL, 680824 | |
| | | 208UL, 741966796UL, 2381283369UL, 3022877171UL, 1619439814UL, 3961433610UL, | |
| | | 1331297670UL, 1100110820UL, 1311672539UL, 1122110615UL, 4056004850UL, 3413 | |
| | | 790176UL, 3148768822UL, 1242592694UL, 2925975727UL, 1879285134UL, 334328879 | |
| | | UL, 1318235222UL, 3140739559UL, 401691770UL, 3604288404UL, 3686496908UL, 77 | |
| | | 0670945UL, 199139043UL, 2092710473UL, 3914528993UL, 700991333UL, 2375775811 | |
| | | UL, 858137308UL, 3490050165UL, 2389078291UL, 1615607459UL, 3027969809UL, 82 | |
| | | 0012549UL, 2085659484UL, 2654485136UL, 2630408646UL, 196481396UL, 111967327 | |
| | | 4UL, 1026209692UL, 726501622UL, 2940737143UL, 3559571163UL, 2288027726UL, 1 | |
| | | 039212708UL, 929664536UL, 1061981465UL, 186058675UL, 3537656152UL, 84417679 | |
| | | 6UL, 2996217992UL, 1545798611UL, 3031020656UL, 2248030435UL, 1665857580UL, | |
| | | 2905758082UL, 1269201312UL, 3031275084UL, 4034872841UL, 983632400UL, 418850 | |
| | | 3190UL, 757119675UL, 2105920865UL, 4281032819UL, 2917801076UL, 3900010013UL | |
| | | , 3910997169UL, 1729751422UL, 562313247UL, 3070846353UL, 2564238664UL, 4050 | |
| | | 540186UL, 4258833501UL, 2270666053UL, 2207128401UL, 2990540001UL, 797768898 | |
| | | UL, 2288390225UL, 3230323685UL, 1974727440UL, 3327301426UL, 289857826UL, 35 | |
| | | 65889868UL, 2791014422UL, 2021097820UL, 3350378271UL, 3673707591UL, 2610067 | |
| | | 927UL, 4255789547UL, 2682856590UL, 12563128UL, 1397542366UL, 237149400UL, 2 | |
| | | 233707508UL, 3875573245UL, 2097374144UL, 175320773UL, 4103445984UL, 4089284 | |
| | | 323UL, 3610168130UL, 3084915964UL, 680145366UL, 2571684685UL, 1132894909UL, | |
| | | 104640024UL, 193765521UL, 2338202907UL, 895271448UL, 11499099UL, 179806641 | |
| | | 7UL, 1297412626UL, 2511347162UL, 3140535007UL, 2129963538UL, 700683199UL, 2 | |
| | | 609700278UL, 2953463279UL, 2290844145UL, 1871316353UL, 3993801787UL, 221941 | |
| | | 3182UL, 2954453701UL, 231283580UL, 1375331115UL, 207723994UL, 1799562537UL, | |
| | | 2056553564UL, 2513609799UL, 3542459627UL, 3173012714UL, 3923404932UL, 2178 | |
| | | 77755UL, 2095124912UL, 192024370UL, 1168134987UL, 1889598668UL, 3014873069U | |
| | | L, 2033573343UL, }, {3465348660UL, 3623545008UL, 3505902593UL, 838034830UL, | |
| | | 1338018789UL, 2595329276UL, 3367746385UL, 3197935201UL, 1439351946UL, 3585 | |
| | | 085571UL, 4165798087UL, 3634792639UL, 2359485974UL, 2772582925UL, 111018620 | |
| | | 3UL, 3771562484UL, 1508694157UL, 1564641206UL, 2801985736UL, 2446107936UL, | |
| | | 3849126897UL, 1842973671UL, 944408104UL, 2624631280UL, 2729080685UL, 373736 | |
| | | 8614UL, 858809173UL, 2289802345UL, 2428186575UL, 3114742765UL, 716011303UL, | |
| | | 3443810690UL, 814132610UL, 517432787UL, 614445393UL, 2930433345UL, 2911780 | |
| | | 98UL, 2117644502UL, 2749446703UL, 311745701UL, 365684723UL, 1705418876UL, 2 | |
| | | 213749318UL, 4011417220UL, 1842575651UL, 988348831UL, 94258998UL, 277115027 | |
| | | 2UL, 498058526UL, 1344827813UL, 2961955291UL, 262703473UL, 1404034822UL, 15 | |
| | | 66595865UL, 2522381203UL, 1706522206UL, 1203054806UL, 1273801539UL, 2070583 | |
| | | 465UL, 3913449936UL, 3231505231UL, 619636751UL, 3746997351UL, 4103027837UL, | |
| | | 1205468203UL, 3355878253UL, 3433356888UL, 107785753UL, 2779092609UL, 18696 | |
| | | 91566UL, 2555219983UL, 903319808UL, 3273374169UL, 2538926990UL, 979533870UL | |
| | | , 1356500860UL, 1661983738UL, 1380761625UL, 2919458459UL, 1041142798UL, 143 | |
| | | 0817627UL, 517007606UL, 1421570516UL, 2371447300UL, 2985632691UL, 368488935 | |
| | | 1UL, 3873926653UL, 788770697UL, 1854750277UL, 209332297UL, 1137299679UL, 84 | |
| | | 8527832UL, 3850486924UL, 4179307312UL, 2764470693UL, 1353191605UL, 41668919 | |
| | | 19UL, 2074703841UL, 3373997532UL, 2013528640UL, 701389744UL, 841917592UL, 2 | |
| | | 065742268UL, 2721848192UL, 2566956680UL, 3122896007UL, 1090761479UL, 921859 | |
| | | 028UL, 4086736376UL, 1837462309UL, 2579826431UL, 2436217134UL, 839037727UL, | |
| | | 1072086642UL, 614518622UL, 3764758228UL, 1501128342UL, 3669108708UL, 16014 | |
| | | 07381UL, 2899014005UL, 3268308948UL, 3337564231UL, 1986911578UL, 3379194930 | |
| | | UL, 1950365753UL, 2098537451UL, 51515980UL, 1176526086UL, 3213391582UL, 105 | |
| | | 9745735UL, 2273586703UL, 376085505UL, 1493749800UL, 3970342143UL, 162092524 | |
| | | 4UL, 2165301314UL, 2332030190UL, 1864098798UL, 276747442UL, 2776569227UL, 2 | |
| | | 992780663UL, 3027279789UL, 1074555384UL, 3481518659UL, 2499703783UL, 661805 | |
| | | 703UL, 3782305562UL, 9186074UL, 2357407210UL, 2355922343UL, 2024733363UL, 4 | |
| | | 85434612UL, 862379913UL, 1029706268UL, 1512726310UL, 3834948354UL, 14358928 | |
| | | 40UL, 3297980694UL, 2831553800UL, 2111416471UL, 711321697UL, 3465348660UL, | |
| | | 3623545008UL, 3505902593UL, 838034830UL, 1553436793UL, 2595329276UL, 336774 | |
| | | 6385UL, 3197935201UL, 1439351946UL, 3198044157UL, 4165798087UL, 3634792639U | |
| | | L, 2359485974UL, 2772582925UL, 836042976UL, 3771562484UL, 1508694157UL, 156 | |
| | | 4641206UL, 2801985736UL, 1190371491UL, 3849126897UL, 1842973671UL, 94440810 | |
| | | 4UL, 2624631280UL, 410746791UL, 3737368614UL, 858809173UL, 2289802345UL, 24 | |
| | | 28186575UL, 1542325976UL, 716011303UL, 3443810690UL, 814132610UL, 517432787 | |
| | | UL, 1649301063UL, 2930433345UL, 291178098UL, 2117644502UL, 2749446703UL, 39 | |
| | | 55511579UL, 365684723UL, 1705418876UL, 2213749318UL, 4011417220UL, 27536328 | |
| | | 62UL, 988348831UL, 94258998UL, 2771150272UL, 498058526UL, 3314106168UL, 296 | |
| | | 1955291UL, 262703473UL, 1404034822UL, 1566595865UL, 3590367097UL, 170652220 | |
| | | 6UL, 1203054806UL, 1273801539UL, 2070583465UL, 2340683261UL, 3231505231UL, | |
| | | 619636751UL, 3746997351UL, 4103027837UL, 2785398766UL, 3355878253UL, 343335 | |
| | | 6888UL, 107785753UL, 2779092609UL, 1608451840UL, 2555219983UL, 903319808UL, | |
| | | 3273374169UL, 2538926990UL, 645164419UL, 1356500860UL, 1661983738UL, 13807 | |
| | | 61625UL, 2919458459UL, 2260224548UL, 1430817627UL, 517007606UL, 1421570516U | |
| | | L, 2371447300UL, 1636004496UL, 3684889351UL, 3873926653UL, 788770697UL, 185 | |
| | | 4750277UL, 1345251011UL, 1137299679UL, 848527832UL, 3850486924UL, 417930731 | |
| | | 2UL, 3576574608UL, 1353191605UL, 4166891919UL, 2074703841UL, 3373997532UL, | |
| | | 183447754UL, 701389744UL, 841917592UL, 2065742268UL, 2721848192UL, 21092898 | |
| | | 91UL, 3122896007UL, 1090761479UL, 921859028UL, 4086736376UL, 2212730874UL, | |
| | | 2579826431UL, 2436217134UL, 839037727UL, 1072086642UL, 55934784UL, 37647582 | |
| | | 28UL, 1501128342UL, 3669108708UL, 1601407381UL, 516550987UL, 3268308948UL, | |
| | | 3337564231UL, 1986911578UL, 3379194930UL, 3973484473UL, 2098537451UL, 51515 | |
| | | 980UL, 1176526086UL, 3213391582UL, 4251661633UL, 2273586703UL, 376085505UL, | |
| | | 1493749800UL, 3970342143UL, 3190791788UL, 2165301314UL, 2332030190UL, 1864 | |
| | | 098798UL, 276747442UL, 2991976613UL, 2992780663UL, 3027279789UL, 1074555384 | |
| | | UL, 3481518659UL, 1399789494UL, 661805703UL, 3782305562UL, 9186074UL, 23574 | |
| | | 07210UL, 1942736967UL, 2024733363UL, 485434612UL, 862379913UL, 1029706268UL | |
| | | , 4122704494UL, 3834948354UL, 1435892840UL, 3297980694UL, 2831553800UL, 121 | |
| | | 0092654UL, 711321697UL, 3465348660UL, 3623545008UL, 3505902593UL, 344323119 | |
| | | 8UL, 1553436793UL, 2595329276UL, 3367746385UL, 3197935201UL, 1304974987UL, | |
| | | 3198044157UL, 4165798087UL, 3634792639UL, 2359485974UL, 3518323362UL, 83604 | |
| | | 2976UL, 3771562484UL, 1508694157UL, 1564641206UL, 3577633375UL, 1190371491U | |
| | | L, 3849126897UL, 1842973671UL, 944408104UL, 1854555112UL, 410746791UL, 3737 | |
| | | 368614UL, 858809173UL, 2289802345UL, 3622671731UL, 1542325976UL, 716011303U | |
| | | L, 3443810690UL, 814132610UL, 296197011UL, 1649301063UL, 2930433345UL, 2911 | |
| | | 78098UL, 2117644502UL, 1056271538UL, 3955511579UL, 365684723UL, 1705418876U | |
| | | L, 2213749318UL, 1258535671UL, 2753632862UL, 988348831UL, 94258998UL, 27711 | |
| | | 50272UL, 3669902097UL, 3314106168UL, 2961955291UL, 262703473UL, 1404034822U | |
| | | L, 1654433938UL, 3590367097UL, 1706522206UL, 1203054806UL, 1273801539UL, 24 | |
| | | 48138887UL, 2340683261UL, 3231505231UL, 619636751UL, 3746997351UL, 14540883 | |
| | | 94UL, 2785398766UL, 3355878253UL, 3433356888UL, 107785753UL, 689323470UL, 1 | |
| | | 608451840UL, 2555219983UL, 903319808UL, 3273374169UL, 1603842392UL, 6451644 | |
| | | 19UL, 1356500860UL, 1661983738UL, 1380761625UL, 2814639423UL, 2260224548UL, | |
| | | 1430817627UL, 517007606UL, 1421570516UL, 1938805701UL, 1636004496UL, 36848 | |
| | | 89351UL, 3873926653UL, 788770697UL, 4238900666UL, 1345251011UL, 1137299679U | |
| | | L, 848527832UL, 3850486924UL, 108793827UL, 3576574608UL, 1353191605UL, 4166 | |
| | | 891919UL, 2074703841UL, 3780897861UL, 183447754UL, 701389744UL, 841917592UL | |
| | | , 2065742268UL, 3036602746UL, 2109289891UL, 3122896007UL, 1090761479UL, 921 | |
| | | 859028UL, 3499985398UL, 2212730874UL, 2579826431UL, 2436217134UL, 839037727 | |
| | | UL, 3520354700UL, 55934784UL, 3764758228UL, 1501128342UL, 3669108708UL, 160 | |
| | | 1010847UL, 516550987UL, 3268308948UL, 3337564231UL, 1986911578UL, 270424178 | |
| | | 1UL, 3973484473UL, 2098537451UL, 51515980UL, 1176526086UL, 3602010532UL, 42 | |
| | | 51661633UL, 2273586703UL, 376085505UL, 1493749800UL, 2922957328UL, 31907917 | |
| | | 88UL, 2165301314UL, 2332030190UL, 1864098798UL, 1649666443UL, 2991976613UL, | |
| | | 2992780663UL, 3027279789UL, 1074555384UL, 2848531519UL, 1399789494UL, 6618 | |
| | | 05703UL, 3782305562UL, 9186074UL, 320781315UL, 1942736967UL, 2024733363UL, | |
| | | 485434612UL, 862379913UL, 3598892066UL, 4122704494UL, 3834948354UL, 1435892 | |
| | | 840UL, 3297980694UL, 545184652UL, 1210092654UL, 711321697UL, 3465348660UL, | |
| | | 3623545008UL, 1173753045UL, 3443231198UL, 1553436793UL, 2595329276UL, 33677 | |
| | | 46385UL, 2444634476UL, 1304974987UL, 3198044157UL, 4165798087UL, 3634792639 | |
| | | UL, 1837035806UL, 3518323362UL, 836042976UL, 3771562484UL, 1508694157UL, 28 | |
| | | 99021294UL, 3577633375UL, 1190371491UL, 3849126897UL, 1842973671UL, 1614215 | |
| | | 215UL, 1854555112UL, 410746791UL, 3737368614UL, 858809173UL, 525745365UL, 3 | |
| | | 622671731UL, 1542325976UL, 716011303UL, 3443810690UL, 566299749UL, 29619701 | |
| | | 1UL, 1649301063UL, 2930433345UL, 291178098UL, 1987532525UL, 1056271538UL, 3 | |
| | | 955511579UL, 365684723UL, 1705418876UL, 2321222760UL, 1258535671UL, 2753632 | |
| | | 862UL, 988348831UL, 94258998UL, 2986060366UL, 3669902097UL, 3314106168UL, 2 | |
| | | 961955291UL, 262703473UL, 604452796UL, 1654433938UL, 3590367097UL, 17065222 | |
| | | 06UL, 1203054806UL, 1894894069UL, 2448138887UL, 2340683261UL, 3231505231UL, | |
| | | 619636751UL, 6680729UL, 1454088394UL, 2785398766UL, 3355878253UL, 34333568 | |
| | | 88UL, 2025591660UL, 689323470UL, 1608451840UL, 2555219983UL, 903319808UL, 3 | |
| | | 430384385UL, 1603842392UL, 645164419UL, 1356500860UL, 1661983738UL, 2108736 | |
| | | 152UL, 2814639423UL, 2260224548UL, 1430817627UL, 517007606UL, 2973658959UL, | |
| | | 1938805701UL, 1636004496UL, 3684889351UL, 3873926653UL, 2283691941UL, 4238 | |
| | | 900666UL, 1345251011UL, 1137299679UL, 848527832UL, 45551112UL, 108793827UL, | |
| | | 3576574608UL, 1353191605UL, 4166891919UL, 3776615962UL, 3780897861UL, 1834 | |
| | | 47754UL, 701389744UL, 841917592UL, 3830639316UL, 3036602746UL, 2109289891UL | |
| | | , 3122896007UL, 1090761479UL, 1931255897UL, 3499985398UL, 2212730874UL, 257 | |
| | | 9826431UL, 2436217134UL, 3272166055UL, 3520354700UL, 55934784UL, 3764758228 | |
| | | UL, 1501128342UL, 1567864246UL, 1601010847UL, 516550987UL, 3268308948UL, 33 | |
| | | 37564231UL, 3918802424UL, 2704241781UL, 3973484473UL, 2098537451UL, 5151598 | |
| | | 0UL, 3551394489UL, 3602010532UL, 4251661633UL, 2273586703UL, 376085505UL, 8 | |
| | | 85459498UL, 2922957328UL, 3190791788UL, 2165301314UL, 2332030190UL, 3197056 | |
| | | 515UL, 1649666443UL, 2991976613UL, 2992780663UL, 3027279789UL, 2385348906UL | |
| | | , 2848531519UL, 1399789494UL, 661805703UL, 3782305562UL, 2163075465UL, 3207 | |
| | | 81315UL, 1942736967UL, 2024733363UL, 485434612UL, 2680597981UL, 3598892066U | |
| | | L, 4122704494UL, 3834948354UL, 1435892840UL, 2499644163UL, 2704575422UL, 25 | |
| | | 79557838UL, 673530532UL, 493730767UL, 1124557747UL, 1908629439UL, 282194950 | |
| | | 4UL, 1743112513UL, 2849457841UL, 2344409314UL, 3479159262UL, 4260973770UL, | |
| | | 2991970754UL, 3812641863UL, 2229319917UL, 2466968521UL, 1766353737UL, 32165 | |
| | | 91612UL, 2113272648UL, 364370737UL, 1893001758UL, 2608875275UL, 4224057183U | |
| | | L, 3546705413UL, 1999778009UL, 348872225UL, 2470564216UL, 1417878284UL, 270 | |
| | | 9790112UL, 3579129936UL, 2137971615UL, 4046639861UL, 2841156930UL, 39154473 | |
| | | 7UL, 2056567354UL, 737657378UL, 3877904725UL, 578930752UL, 1759172471UL, 33 | |
| | | 83278785UL, 1047197514UL, 649468151UL, 3452867243UL, 1792089520UL, 63936215 | |
| | | UL, 3909143729UL, 3753489875UL, 734314122UL, 2490530916UL, 3043874586UL, 15 | |
| | | 04812057UL, 59001199UL, 2493748676UL, 2552438622UL, 1889694845UL, 371539786 | |
| | | 0UL, 2817245010UL, 3841049206UL, 816106718UL, 2176130406UL, 640254735UL, 12 | |
| | | 376903UL, 3000264936UL, 3304116079UL, 1620334094UL, 2109391765UL, 134821095 | |
| | | 1UL, 2237645681UL, 1207768272UL, 1562894669UL, 2156631655UL, 1387193235UL, | |
| | | 3154858817UL, 633510901UL, 2312190757UL, 402878244UL, 2501565021UL, 2984409 | |
| | | 334UL, 4167491216UL, 3614267292UL, 3078552271UL, 971722322UL, 3065543880UL, | |
| | | 2307584190UL, 491480322UL, 2068673112UL, 1929780632UL, 178549964UL, 983979 | |
| | | 983UL, 2769314886UL, 4214442042UL, 2977609682UL, 25450683UL, 3075212658UL, | |
| | | 1571149568UL, 3531670561UL, 42782504UL, 425601306UL, 428715214UL, 497250251 | |
| | | UL, 693520802UL, 166426814UL, 1786382125UL, 2712003995UL, 3610802197UL, 207 | |
| | | 6490757UL, 404822980UL, 3953184772UL, 1655231947UL, 3594351577UL, 306823227 | |
| | | 4UL, 3771730346UL, 4110519574UL, 3534704897UL, 2375277865UL, 3597780202UL, | |
| | | 3472676002UL, 1350276449UL, 3218248239UL, 3589255283UL, 3253132633UL, 17698 | |
| | | 85529UL, 3792812294UL, 120332643UL, 1219374788UL, 3608889019UL, 2386099811U | |
| | | L, 858495304UL, 1284785543UL, 331370962UL, 2259419662UL, 2519864134UL, 3194 | |
| | | 739432UL, 2669074511UL, 2565559140UL, 3378072004UL, 2647801475UL, 265068954 | |
| | | UL, 1464416963UL, 1232787612UL, 4160089759UL, 2510685972UL, 670300081UL, 25 | |
| | | 09357766UL, 1981891975UL, 4161588397UL, 1371924626UL, 44760868UL, 634955171 | |
| | | UL, 1187096933UL, 3324788972UL, 3576888559UL, 2801347752UL, 3730298395UL, 1 | |
| | | 702170762UL, 4206083415UL, 741409141UL, 3649731355UL, 1025429529UL, }, {914 | |
| | | 44490UL, 628576944UL, 4069219862UL, 2253058925UL, 492354082UL, 1191182242UL | |
| | | , 1565180119UL, 2257613723UL, 456055162UL, 605712223UL, 953365104UL, 310463 | |
| | | 8527UL, 1133984729UL, 2662828416UL, 2134948274UL, 1921384447UL, 843719355UL | |
| | | , 588432962UL, 1734575434UL, 2924140067UL, 483396548UL, 3848838894UL, 31554 | |
| | | 76556UL, 1760928304UL, 4168059840UL, 3279827269UL, 2644461735UL, 4168565656 | |
| | | UL, 3951563569UL, 1276805504UL, 1708974143UL, 1878547888UL, 3465220024UL, 3 | |
| | | 062086782UL, 2801401651UL, 1510428126UL, 716404149UL, 1646021208UL, 3534932 | |
| | | 385UL, 1186585561UL, 651997355UL, 282914223UL, 352224857UL, 3764407517UL, 1 | |
| | | 059868753UL, 1971798134UL, 978904005UL, 976413661UL, 4039544152UL, 49898969 | |
| | | 3UL, 2565125471UL, 2782642813UL, 3537961025UL, 1194967362UL, 169217024UL, 3 | |
| | | 491609UL, 1319592872UL, 1630206561UL, 2497130840UL, 1685008996UL, 282894401 | |
| | | 6UL, 3301346775UL, 2893072371UL, 2606559798UL, 4026138031UL, 2664450619UL, | |
| | | 691091062UL, 1079640113UL, 1417637732UL, 4081852209UL, 2197910648UL, 231038 | |
| | | 2370UL, 1000957047UL, 959936499UL, 2844551811UL, 2272766890UL, 31122394UL, | |
| | | 2742925483UL, 1121884686UL, 57929089UL, 2468361281UL, 2982007782UL, 2371576 | |
| | | 893UL, 177782593UL, 3603584577UL, 672057044UL, 2108452841UL, 1671338057UL, | |
| | | 3386908223UL, 1243029765UL, 805157552UL, 1271858417UL, 1621249501UL, 180485 | |
| | | 1492UL, 1321010403UL, 751773221UL, 1517221627UL, 822709871UL, 104533154UL, | |
| | | 3578182264UL, 640541709UL, 421086624UL, 4233576392UL, 3729339369UL, 1974606 | |
| | | 44UL, 773140636UL, 2158026018UL, 1756785611UL, 4011575991UL, 3569445500UL, | |
| | | 736117181UL, 2456162322UL, 1168189787UL, 3651312675UL, 1070291988UL, 268231 | |
| | | 205UL, 541474497UL, 3316168972UL, 3546990856UL, 830417208UL, 725960194UL, 2 | |
| | | 044207227UL, 3188997938UL, 2383298579UL, 3350316374UL, 3575011225UL, 155311 | |
| | | 1865UL, 1285013027UL, 749371711UL, 766611716UL, 598195098UL, 2139882719UL, | |
| | | 2062405428UL, 3634702446UL, 3015263295UL, 223311969UL, 2622859522UL, 388849 | |
| | | 2701UL, 2955257225UL, 582625650UL, 3563756446UL, 2886083960UL, 1907546514UL | |
| | | , 454650902UL, 3287277541UL, 625828138UL, 2991888140UL, 1935326370UL, 40311 | |
| | | 52256UL, 702881509UL, 1427632724UL, 1345475301UL, 2577560804UL, 2858595147U | |
| | | L, 2533191188UL, 185662179UL, 536505093UL, 3747894147UL, 111551030UL, 37037 | |
| | | 3207UL, 2293908590UL, 91444490UL, 628576944UL, 4069219862UL, 2253058925UL, | |
| | | 1671484924UL, 1191182242UL, 1565180119UL, 2257613723UL, 456055162UL, 341109 | |
| | | 4744UL, 953365104UL, 3104638527UL, 1133984729UL, 2662828416UL, 2000630022UL | |
| | | , 1921384447UL, 843719355UL, 588432962UL, 1734575434UL, 3293926122UL, 48339 | |
| | | 6548UL, 3848838894UL, 3155476556UL, 1760928304UL, 146876953UL, 3279827269UL | |
| | | , 2644461735UL, 4168565656UL, 3951563569UL, 3976156700UL, 1708974143UL, 187 | |
| | | 8547888UL, 3465220024UL, 3062086782UL, 1999154400UL, 1510428126UL, 71640414 | |
| | | 9UL, 1646021208UL, 3534932385UL, 2479551429UL, 651997355UL, 282914223UL, 35 | |
| | | 2224857UL, 3764407517UL, 1275979651UL, 1971798134UL, 978904005UL, 976413661 | |
| | | UL, 4039544152UL, 300654823UL, 2565125471UL, 2782642813UL, 3537961025UL, 11 | |
| | | 94967362UL, 3123973648UL, 3491609UL, 1319592872UL, 1630206561UL, 2497130840 | |
| | | UL, 1437913158UL, 2828944016UL, 3301346775UL, 2893072371UL, 2606559798UL, 2 | |
| | | 153172585UL, 2664450619UL, 691091062UL, 1079640113UL, 1417637732UL, 1713723 | |
| | | 7UL, 2197910648UL, 2310382370UL, 1000957047UL, 959936499UL, 802137134UL, 22 | |
| | | 72766890UL, 31122394UL, 2742925483UL, 1121884686UL, 3909775167UL, 246836128 | |
| | | 1UL, 2982007782UL, 2371576893UL, 177782593UL, 3319492525UL, 672057044UL, 21 | |
| | | 08452841UL, 1671338057UL, 3386908223UL, 1878151473UL, 805157552UL, 12718584 | |
| | | 17UL, 1621249501UL, 1804851492UL, 3215921223UL, 751773221UL, 1517221627UL, | |
| | | 822709871UL, 104533154UL, 361845001UL, 640541709UL, 421086624UL, 4233576392 | |
| | | UL, 3729339369UL, 2655936801UL, 773140636UL, 2158026018UL, 1756785611UL, 40 | |
| | | 11575991UL, 587202971UL, 736117181UL, 2456162322UL, 1168189787UL, 365131267 | |
| | | 5UL, 2517883370UL, 268231205UL, 541474497UL, 3316168972UL, 3546990856UL, 20 | |
| | | 37251305UL, 725960194UL, 2044207227UL, 3188997938UL, 2383298579UL, 26650085 | |
| | | 87UL, 3575011225UL, 1553111865UL, 1285013027UL, 749371711UL, 2163964019UL, | |
| | | 598195098UL, 2139882719UL, 2062405428UL, 3634702446UL, 2788202059UL, 223311 | |
| | | 969UL, 2622859522UL, 3888492701UL, 2955257225UL, 740986174UL, 3563756446UL, | |
| | | 2886083960UL, 1907546514UL, 454650902UL, 2426323587UL, 625828138UL, 299188 | |
| | | 8140UL, 1935326370UL, 4031152256UL, 1831149435UL, 1427632724UL, 1345475301U | |
| | | L, 2577560804UL, 2858595147UL, 3977153945UL, 185662179UL, 536505093UL, 3747 | |
| | | 894147UL, 111551030UL, 4131587422UL, 2293908590UL, 91444490UL, 628576944UL, | |
| | | 4069219862UL, 2408189350UL, 1671484924UL, 1191182242UL, 1565180119UL, 2257 | |
| | | 613723UL, 1338069254UL, 3411094744UL, 953365104UL, 3104638527UL, 1133984729 | |
| | | UL, 631497759UL, 2000630022UL, 1921384447UL, 843719355UL, 588432962UL, 3280 | |
| | | 318959UL, 3293926122UL, 483396548UL, 3848838894UL, 3155476556UL, 1777918163 | |
| | | UL, 146876953UL, 3279827269UL, 2644461735UL, 4168565656UL, 2786264663UL, 39 | |
| | | 76156700UL, 1708974143UL, 1878547888UL, 3465220024UL, 2793923820UL, 1999154 | |
| | | 400UL, 1510428126UL, 716404149UL, 1646021208UL, 3102243824UL, 2479551429UL, | |
| | | 651997355UL, 282914223UL, 352224857UL, 3767702588UL, 1275979651UL, 1971798 | |
| | | 134UL, 978904005UL, 976413661UL, 1951622548UL, 300654823UL, 2565125471UL, 2 | |
| | | 782642813UL, 3537961025UL, 2186817324UL, 3123973648UL, 3491609UL, 131959287 | |
| | | 2UL, 1630206561UL, 1075424534UL, 1437913158UL, 2828944016UL, 3301346775UL, | |
| | | 2893072371UL, 207992406UL, 2153172585UL, 2664450619UL, 691091062UL, 1079640 | |
| | | 113UL, 3114255216UL, 17137237UL, 2197910648UL, 2310382370UL, 1000957047UL, | |
| | | 2548008553UL, 802137134UL, 2272766890UL, 31122394UL, 2742925483UL, 40694823 | |
| | | 73UL, 3909775167UL, 2468361281UL, 2982007782UL, 2371576893UL, 2807823912UL, | |
| | | 3319492525UL, 672057044UL, 2108452841UL, 1671338057UL, 12831353UL, 1878151 | |
| | | 473UL, 805157552UL, 1271858417UL, 1621249501UL, 461887094UL, 3215921223UL, | |
| | | 751773221UL, 1517221627UL, 822709871UL, 1317394918UL, 361845001UL, 64054170 | |
| | | 9UL, 421086624UL, 4233576392UL, 3385587450UL, 2655936801UL, 773140636UL, 21 | |
| | | 58026018UL, 1756785611UL, 1475601973UL, 587202971UL, 736117181UL, 245616232 | |
| | | 2UL, 1168189787UL, 911455077UL, 2517883370UL, 268231205UL, 541474497UL, 331 | |
| | | 6168972UL, 1500275507UL, 2037251305UL, 725960194UL, 2044207227UL, 318899793 | |
| | | 8UL, 2036633808UL, 2665008587UL, 3575011225UL, 1553111865UL, 1285013027UL, | |
| | | 87868216UL, 2163964019UL, 598195098UL, 2139882719UL, 2062405428UL, 51790730 | |
| | | 1UL, 2788202059UL, 223311969UL, 2622859522UL, 3888492701UL, 3926046234UL, 7 | |
| | | 40986174UL, 3563756446UL, 2886083960UL, 1907546514UL, 1911066215UL, 2426323 | |
| | | 587UL, 625828138UL, 2991888140UL, 1935326370UL, 2031853435UL, 1831149435UL, | |
| | | 1427632724UL, 1345475301UL, 2577560804UL, 3509674153UL, 3977153945UL, 1856 | |
| | | 62179UL, 536505093UL, 3747894147UL, 1711714600UL, 4131587422UL, 2293908590U | |
| | | L, 91444490UL, 628576944UL, 3370678255UL, 2408189350UL, 1671484924UL, 11911 | |
| | | 82242UL, 1565180119UL, 3786239592UL, 1338069254UL, 3411094744UL, 953365104U | |
| | | L, 3104638527UL, 3659647225UL, 631497759UL, 2000630022UL, 1921384447UL, 843 | |
| | | 719355UL, 3364831282UL, 3280318959UL, 3293926122UL, 483396548UL, 3848838894 | |
| | | UL, 3131266478UL, 1777918163UL, 146876953UL, 3279827269UL, 2644461735UL, 41 | |
| | | 56372383UL, 2786264663UL, 3976156700UL, 1708974143UL, 1878547888UL, 2168041 | |
| | | 590UL, 2793923820UL, 1999154400UL, 1510428126UL, 716404149UL, 3392113666UL, | |
| | | 3102243824UL, 2479551429UL, 651997355UL, 282914223UL, 2085613514UL, 376770 | |
| | | 2588UL, 1275979651UL, 1971798134UL, 978904005UL, 503506384UL, 1951622548UL, | |
| | | 300654823UL, 2565125471UL, 2782642813UL, 1458431750UL, 2186817324UL, 31239 | |
| | | 73648UL, 3491609UL, 1319592872UL, 452433679UL, 1075424534UL, 1437913158UL, | |
| | | 2828944016UL, 3301346775UL, 2333281307UL, 207992406UL, 2153172585UL, 266445 | |
| | | 0619UL, 691091062UL, 3553502652UL, 3114255216UL, 17137237UL, 2197910648UL, | |
| | | 2310382370UL, 3153689868UL, 2548008553UL, 802137134UL, 2272766890UL, 311223 | |
| | | 94UL, 468580641UL, 4069482373UL, 3909775167UL, 2468361281UL, 2982007782UL, | |
| | | 1445286890UL, 2807823912UL, 3319492525UL, 672057044UL, 2108452841UL, 175557 | |
| | | 7669UL, 12831353UL, 1878151473UL, 805157552UL, 1271858417UL, 2623540912UL, | |
| | | 461887094UL, 3215921223UL, 751773221UL, 1517221627UL, 3922191946UL, 1317394 | |
| | | 918UL, 361845001UL, 640541709UL, 421086624UL, 2173849516UL, 3385587450UL, 2 | |
| | | 655936801UL, 773140636UL, 2158026018UL, 1085377158UL, 1475601973UL, 5872029 | |
| | | 71UL, 736117181UL, 2456162322UL, 2158960374UL, 911455077UL, 2517883370UL, 2 | |
| | | 68231205UL, 541474497UL, 943191315UL, 1500275507UL, 2037251305UL, 725960194 | |
| | | UL, 2044207227UL, 2481150802UL, 2036633808UL, 2665008587UL, 3575011225UL, 1 | |
| | | 553111865UL, 2301231777UL, 87868216UL, 2163964019UL, 598195098UL, 213988271 | |
| | | 9UL, 2007840238UL, 517907301UL, 2788202059UL, 223311969UL, 2622859522UL, 15 | |
| | | 1920263UL, 3926046234UL, 740986174UL, 3563756446UL, 2886083960UL, 133893792 | |
| | | 8UL, 1911066215UL, 2426323587UL, 625828138UL, 2991888140UL, 2652286195UL, 2 | |
| | | 031853435UL, 1831149435UL, 1427632724UL, 1345475301UL, 289801789UL, 3509674 | |
| | | 153UL, 3977153945UL, 185662179UL, 536505093UL, 2727322952UL, 3980498348UL, | |
| | | 2529622213UL, 1903052964UL, 3564714651UL, 2281240568UL, 533384122UL, 277613 | |
| | | 480UL, 1815540358UL, 282763841UL, 3669112623UL, 2572859425UL, 195220178UL, | |
| | | 1210883545UL, 2359703600UL, 1187537824UL, 675732974UL, 325036095UL, 7080914 | |
| | | 65UL, 2556854604UL, 701006284UL, 2378459191UL, 1863513103UL, 2690918197UL, | |
| | | 4237307694UL, 1356483501UL, 2160905652UL, 521809106UL, 974368613UL, 3136010 | |
| | | 957UL, 2722488678UL, 3711515637UL, 2296341459UL, 4233729945UL, 1196247571UL | |
| | | , 3031398071UL, 515543502UL, 1314129776UL, 3235373306UL, 1303165859UL, 1820 | |
| | | 568009UL, 559099351UL, 186876368UL, 1076102111UL, 1218809551UL, 1790301111U | |
| | | L, 4130210229UL, 768125358UL, 1132864749UL, 4262563773UL, 2294411020UL, 409 | |
| | | 2943985UL, 2558108246UL, 3737664949UL, 2219923393UL, 724326159UL, 413410568 | |
| | | 2UL, 4188752746UL, 3615233671UL, 1526018731UL, 2281637916UL, 2459490295UL, | |
| | | 3637342666UL, 777862587UL, 39962002UL, 3772005832UL, 997473319UL, 574843584 | |
| | | UL, 3356551974UL, 1265234427UL, 1698059437UL, 534747571UL, 1465532164UL, 32 | |
| | | 63029035UL, 534512444UL, 2343092827UL, 2375685652UL, 2497926141UL, 23779336 | |
| | | 21UL, 2212335180UL, 261114084UL, 172755755UL, 2737085495UL, 2225257145UL, 1 | |
| | | 48605658UL, 1353911796UL, 357753009UL, 1778732943UL, 497635558UL, 413646797 | |
| | | 6UL, 2837964962UL, 4045039047UL, 2485296762UL, 1587587183UL, 4042904168UL, | |
| | | 3184240963UL, 2393293696UL, 915444966UL, 2299938515UL, 3351580749UL, 506575 | |
| | | 598UL, 1541916825UL, 3465300401UL, 525927458UL, 681152801UL, 331660975UL, 3 | |
| | | 624685846UL, 2994172100UL, 3274369082UL, 3638287602UL, 815689760UL, 1710961 | |
| | | 092UL, 2775607076UL, 2175058103UL, 3252688367UL, 2936890483UL, 2746319120UL | |
| | | , 2736754UL, 1646031035UL, 2448701214UL, 2886833213UL, 3689830606UL, 329279 | |
| | | 8106UL, 300773646UL, 3125160783UL, 1247453205UL, 2746275624UL, 4011063775UL | |
| | | , 904135764UL, 876847374UL, 366267234UL, 2541269205UL, 131376648UL, 1805948 | |
| | | 133UL, 3383589530UL, 2350119829UL, 2513170439UL, 4096158499UL, 4229211520UL | |
| | | , 2992048272UL, 1338522080UL, 1187391335UL, 2898563453UL, 2163088451UL, 141 | |
| | | 7971677UL, 2047421551UL, 902282791UL, 1143943232UL, 3568431811UL, 405986199 | |
| | | 3UL, 193362198UL, 2509297125UL, 3968551582UL, 2175686117UL, 3568936881UL, 1 | |
| | | 853177468UL, 2134063169UL, 2919389416UL, 1124914545UL, 1209806738UL, }, {11 | |
| | | 99972651UL, 1035834631UL, 3177798370UL, 860834162UL, 3741677748UL, 37803278 | |
| | | 29UL, 1693730265UL, 1643429511UL, 559568669UL, 2758650294UL, 647308222UL, 3 | |
| | | 901603996UL, 1778653821UL, 3618523672UL, 2154201067UL, 4261179460UL, 328576 | |
| | | 4480UL, 3334002738UL, 3215795953UL, 91368462UL, 1883994950UL, 1506873376UL, | |
| | | 1527780962UL, 4046354597UL, 4081676034UL, 2389066602UL, 1574939945UL, 4278 | |
| | | 45396UL, 2714836263UL, 1259019491UL, 2493238133UL, 2584034689UL, 3151382431 | |
| | | UL, 2171033919UL, 176883719UL, 2031844862UL, 1272380790UL, 1298975901UL, 40 | |
| | | 87222847UL, 1524000054UL, 311436877UL, 3627785554UL, 1889491722UL, 29380691 | |
| | | 93UL, 2771940687UL, 2756955968UL, 4289348777UL, 263514583UL, 887207028UL, 3 | |
| | | 522902525UL, 2273246349UL, 835377715UL, 2897243319UL, 204645450UL, 17759119 | |
| | | 83UL, 639470242UL, 2856296318UL, 3032942383UL, 2845501282UL, 1979082575UL, | |
| | | 202834023UL, 1876303820UL, 1434703409UL, 4240524132UL, 848853780UL, 4188621 | |
| | | 628UL, 928095314UL, 876412914UL, 3446576392UL, 3235688990UL, 4021419931UL, | |
| | | 2483628986UL, 3155781890UL, 399997246UL, 1642535200UL, 3872575068UL, 157795 | |
| | | 6550UL, 3606228634UL, 609914462UL, 653194726UL, 4048067248UL, 2500767965UL, | |
| | | 1125167825UL, 3707628088UL, 1819135158UL, 1875618971UL, 3865851141UL, 3282 | |
| | | 15079UL, 1695889194UL, 2040280471UL, 3384684457UL, 2540504961UL, 293050253U | |
| | | L, 525570078UL, 2655676443UL, 1392199429UL, 3370444585UL, 1937915855UL, 222 | |
| | | 9636250UL, 247937142UL, 2534538765UL, 365841057UL, 2449431033UL, 2456532429 | |
| | | UL, 101910696UL, 1247069485UL, 1523958293UL, 2473285670UL, 473709728UL, 302 | |
| | | 6667113UL, 2071968844UL, 324025193UL, 423064436UL, 3870800061UL, 3977393138 | |
| | | UL, 3632553233UL, 352757977UL, 1584833348UL, 3173248650UL, 1159857686UL, 15 | |
| | | 01841977UL, 1751860798UL, 617281070UL, 1958012761UL, 4031667102UL, 32321423 | |
| | | 21UL, 3087428595UL, 2380824676UL, 1194087757UL, 1542961747UL, 4163350364UL, | |
| | | 1721646249UL, 1672791861UL, 2900511710UL, 24973500UL, 1705444176UL, 713642 | |
| | | 505UL, 3017719513UL, 2090715200UL, 3521434070UL, 37117223UL, 1948295454UL, | |
| | | 3055840561UL, 3476120789UL, 3994249388UL, 527899063UL, 4285770666UL, 107552 | |
| | | 4023UL, 2594223535UL, 392943522UL, 171012646UL, 3515750082UL, 3414659054UL, | |
| | | 3501852926UL, 1493283737UL, 2662104279UL, 2033464928UL, 90134967UL, 363058 | |
| | | 647UL, 3289266998UL, 2470752727UL, 1199972651UL, 1035834631UL, 3177798370UL | |
| | | , 860834162UL, 1791097822UL, 3780327829UL, 1693730265UL, 1643429511UL, 5595 | |
| | | 68669UL, 3503319486UL, 647308222UL, 3901603996UL, 1778653821UL, 3618523672U | |
| | | L, 4294594427UL, 4261179460UL, 3285764480UL, 3334002738UL, 3215795953UL, 21 | |
| | | 2518363UL, 1883994950UL, 1506873376UL, 1527780962UL, 4046354597UL, 23986556 | |
| | | 00UL, 2389066602UL, 1574939945UL, 427845396UL, 2714836263UL, 2744363872UL, | |
| | | 2493238133UL, 2584034689UL, 3151382431UL, 2171033919UL, 2787053497UL, 20318 | |
| | | 44862UL, 1272380790UL, 1298975901UL, 4087222847UL, 2342953154UL, 311436877U | |
| | | L, 3627785554UL, 1889491722UL, 2938069193UL, 2026656505UL, 2756955968UL, 42 | |
| | | 89348777UL, 263514583UL, 887207028UL, 2097276163UL, 2273246349UL, 835377715 | |
| | | UL, 2897243319UL, 204645450UL, 4233399907UL, 639470242UL, 2856296318UL, 303 | |
| | | 2942383UL, 2845501282UL, 28260330UL, 202834023UL, 1876303820UL, 1434703409U | |
| | | L, 4240524132UL, 2455670466UL, 4188621628UL, 928095314UL, 876412914UL, 3446 | |
| | | 576392UL, 117581687UL, 4021419931UL, 2483628986UL, 3155781890UL, 399997246U | |
| | | L, 4254101087UL, 3872575068UL, 1577956550UL, 3606228634UL, 609914462UL, 400 | |
| | | 3279048UL, 4048067248UL, 2500767965UL, 1125167825UL, 3707628088UL, 92202051 | |
| | | 5UL, 1875618971UL, 3865851141UL, 328215079UL, 1695889194UL, 625773097UL, 33 | |
| | | 84684457UL, 2540504961UL, 293050253UL, 525570078UL, 2592805114UL, 139219942 | |
| | | 9UL, 3370444585UL, 1937915855UL, 2229636250UL, 3190958614UL, 2534538765UL, | |
| | | 365841057UL, 2449431033UL, 2456532429UL, 3778669305UL, 1247069485UL, 152395 | |
| | | 8293UL, 2473285670UL, 473709728UL, 720895889UL, 2071968844UL, 324025193UL, | |
| | | 423064436UL, 3870800061UL, 3535536111UL, 3632553233UL, 352757977UL, 1584833 | |
| | | 348UL, 3173248650UL, 2649344603UL, 1501841977UL, 1751860798UL, 617281070UL, | |
| | | 1958012761UL, 778965559UL, 3232142321UL, 3087428595UL, 2380824676UL, 11940 | |
| | | 87757UL, 3880222002UL, 4163350364UL, 1721646249UL, 1672791861UL, 2900511710 | |
| | | UL, 702936770UL, 1705444176UL, 713642505UL, 3017719513UL, 2090715200UL, 147 | |
| | | 7858694UL, 37117223UL, 1948295454UL, 3055840561UL, 3476120789UL, 464173532U | |
| | | L, 527899063UL, 4285770666UL, 1075524023UL, 2594223535UL, 2872629966UL, 171 | |
| | | 012646UL, 3515750082UL, 3414659054UL, 3501852926UL, 1631555059UL, 266210427 | |
| | | 9UL, 2033464928UL, 90134967UL, 363058647UL, 4112991722UL, 2470752727UL, 119 | |
| | | 9972651UL, 1035834631UL, 3177798370UL, 4152098951UL, 1791097822UL, 37803278 | |
| | | 29UL, 1693730265UL, 1643429511UL, 153020604UL, 3503319486UL, 647308222UL, 3 | |
| | | 901603996UL, 1778653821UL, 221887019UL, 4294594427UL, 4261179460UL, 3285764 | |
| | | 480UL, 3334002738UL, 3340918862UL, 212518363UL, 1883994950UL, 1506873376UL, | |
| | | 1527780962UL, 430180116UL, 2398655600UL, 2389066602UL, 1574939945UL, 42784 | |
| | | 5396UL, 1683639957UL, 2744363872UL, 2493238133UL, 2584034689UL, 3151382431U | |
| | | L, 752704472UL, 2787053497UL, 2031844862UL, 1272380790UL, 1298975901UL, 152 | |
| | | 8220628UL, 2342953154UL, 311436877UL, 3627785554UL, 1889491722UL, 257649546 | |
| | | 7UL, 2026656505UL, 2756955968UL, 4289348777UL, 263514583UL, 3778019638UL, 2 | |
| | | 097276163UL, 2273246349UL, 835377715UL, 2897243319UL, 1060067446UL, 4233399 | |
| | | 907UL, 639470242UL, 2856296318UL, 3032942383UL, 2351047932UL, 28260330UL, 2 | |
| | | 02834023UL, 1876303820UL, 1434703409UL, 3094305336UL, 2455670466UL, 4188621 | |
| | | 628UL, 928095314UL, 876412914UL, 3785385583UL, 117581687UL, 4021419931UL, 2 | |
| | | 483628986UL, 3155781890UL, 1867816730UL, 4254101087UL, 3872575068UL, 157795 | |
| | | 6550UL, 3606228634UL, 3081878598UL, 4003279048UL, 4048067248UL, 2500767965U | |
| | | L, 1125167825UL, 928465955UL, 922020515UL, 1875618971UL, 3865851141UL, 3282 | |
| | | 15079UL, 173810260UL, 625773097UL, 3384684457UL, 2540504961UL, 293050253UL, | |
| | | 2645143254UL, 2592805114UL, 1392199429UL, 3370444585UL, 1937915855UL, 1627 | |
| | | 81360UL, 3190958614UL, 2534538765UL, 365841057UL, 2449431033UL, 3105377832U | |
| | | L, 3778669305UL, 1247069485UL, 1523958293UL, 2473285670UL, 800971948UL, 720 | |
| | | 895889UL, 2071968844UL, 324025193UL, 423064436UL, 52577992UL, 3535536111UL, | |
| | | 3632553233UL, 352757977UL, 1584833348UL, 3305908059UL, 2649344603UL, 15018 | |
| | | 41977UL, 1751860798UL, 617281070UL, 264880505UL, 778965559UL, 3232142321UL, | |
| | | 3087428595UL, 2380824676UL, 1127761012UL, 3880222002UL, 4163350364UL, 1721 | |
| | | 646249UL, 1672791861UL, 2368512339UL, 702936770UL, 1705444176UL, 713642505U | |
| | | L, 3017719513UL, 197200752UL, 1477858694UL, 37117223UL, 1948295454UL, 30558 | |
| | | 40561UL, 1588372042UL, 464173532UL, 527899063UL, 4285770666UL, 1075524023UL | |
| | | , 2124039914UL, 2872629966UL, 171012646UL, 3515750082UL, 3414659054UL, 8185 | |
| | | 71456UL, 1631555059UL, 2662104279UL, 2033464928UL, 90134967UL, 952712086UL, | |
| | | 4112991722UL, 2470752727UL, 1199972651UL, 1035834631UL, 888975816UL, 41520 | |
| | | 98951UL, 1791097822UL, 3780327829UL, 1693730265UL, 3406785510UL, 153020604U | |
| | | L, 3503319486UL, 647308222UL, 3901603996UL, 3753248472UL, 221887019UL, 4294 | |
| | | 594427UL, 4261179460UL, 3285764480UL, 1861431346UL, 3340918862UL, 212518363 | |
| | | UL, 1883994950UL, 1506873376UL, 2695939612UL, 430180116UL, 2398655600UL, 23 | |
| | | 89066602UL, 1574939945UL, 2852159074UL, 1683639957UL, 2744363872UL, 2493238 | |
| | | 133UL, 2584034689UL, 1952065633UL, 752704472UL, 2787053497UL, 2031844862UL, | |
| | | 1272380790UL, 3530505866UL, 1528220628UL, 2342953154UL, 311436877UL, 36277 | |
| | | 85554UL, 3410473245UL, 2576495467UL, 2026656505UL, 2756955968UL, 4289348777 | |
| | | UL, 2856163034UL, 3778019638UL, 2097276163UL, 2273246349UL, 835377715UL, 31 | |
| | | 27280755UL, 1060067446UL, 4233399907UL, 639470242UL, 2856296318UL, 26157750 | |
| | | 11UL, 2351047932UL, 28260330UL, 202834023UL, 1876303820UL, 619308202UL, 309 | |
| | | 4305336UL, 2455670466UL, 4188621628UL, 928095314UL, 3764894047UL, 378538558 | |
| | | 3UL, 117581687UL, 4021419931UL, 2483628986UL, 3759839215UL, 1867816730UL, 4 | |
| | | 254101087UL, 3872575068UL, 1577956550UL, 1687107439UL, 3081878598UL, 400327 | |
| | | 9048UL, 4048067248UL, 2500767965UL, 2804044146UL, 928465955UL, 922020515UL, | |
| | | 1875618971UL, 3865851141UL, 2359176389UL, 173810260UL, 625773097UL, 338468 | |
| | | 4457UL, 2540504961UL, 3665420733UL, 2645143254UL, 2592805114UL, 1392199429U | |
| | | L, 3370444585UL, 1604709429UL, 162781360UL, 3190958614UL, 2534538765UL, 365 | |
| | | 841057UL, 3843585067UL, 3105377832UL, 3778669305UL, 1247069485UL, 152395829 | |
| | | 3UL, 293374051UL, 800971948UL, 720895889UL, 2071968844UL, 324025193UL, 3342 | |
| | | 361801UL, 52577992UL, 3535536111UL, 3632553233UL, 352757977UL, 1386594581UL | |
| | | , 3305908059UL, 2649344603UL, 1501841977UL, 1751860798UL, 3160423601UL, 264 | |
| | | 880505UL, 778965559UL, 3232142321UL, 3087428595UL, 3814775120UL, 1127761012 | |
| | | UL, 3880222002UL, 4163350364UL, 1721646249UL, 3640773034UL, 2368512339UL, 7 | |
| | | 02936770UL, 1705444176UL, 713642505UL, 1717761787UL, 197200752UL, 147785869 | |
| | | 4UL, 37117223UL, 1948295454UL, 896215772UL, 1588372042UL, 464173532UL, 5278 | |
| | | 99063UL, 4285770666UL, 3441409029UL, 2124039914UL, 2872629966UL, 171012646U | |
| | | L, 3515750082UL, 2216687886UL, 818571456UL, 1631555059UL, 2662104279UL, 203 | |
| | | 3464928UL, 369438400UL, 329003658UL, 1503365029UL, 4215790910UL, 3264377550 | |
| | | UL, 733526983UL, 2935318632UL, 1792331479UL, 608347530UL, 392723097UL, 1330 | |
| | | 445854UL, 3473004271UL, 1267636682UL, 2150566972UL, 2664910943UL, 259186163 | |
| | | 7UL, 409769584UL, 2943326880UL, 3746302819UL, 3162268832UL, 1028663260UL, 3 | |
| | | 206607045UL, 832105292UL, 2119405275UL, 538318455UL, 2981192295UL, 86177541 | |
| | | 6UL, 609718403UL, 3531204230UL, 1904759571UL, 1262633751UL, 2375133081UL, 4 | |
| | | 60454984UL, 946700253UL, 3763898311UL, 1571175213UL, 3124410107UL, 24134202 | |
| | | 16UL, 2664177543UL, 3241803820UL, 3968067371UL, 1234860999UL, 1130471500UL, | |
| | | 772727786UL, 247203117UL, 576455235UL, 246297007UL, 2027348597UL, 76493388 | |
| | | 7UL, 3812479771UL, 1825807084UL, 4072281412UL, 2156865781UL, 1286484847UL, | |
| | | 1966749063UL, 2479269303UL, 423506843UL, 3070938758UL, 653091413UL, 2267423 | |
| | | 132UL, 2004263526UL, 1374490719UL, 3871990628UL, 841138314UL, 1260317857UL, | |
| | | 3887432433UL, 4025147569UL, 764233331UL, 1794763428UL, 3005903468UL, 87792 | |
| | | 6770UL, 2466593927UL, 2971729561UL, 3203070565UL, 4198500026UL, 815665759UL | |
| | | , 2434508139UL, 1840456368UL, 2279000427UL, 17077200UL, 3178380570UL, 99030 | |
| | | 4199UL, 3578008580UL, 1965763660UL, 1640352477UL, 750159594UL, 2047409402UL | |
| | | , 3576308245UL, 544920564UL, 1730124869UL, 1194761386UL, 3280315505UL, 1473 | |
| | | 34027UL, 2870674244UL, 2076860776UL, 1100947675UL, 2482772161UL, 401966468U | |
| | | L, 1610650855UL, 193868446UL, 3808157106UL, 1509130117UL, 1324484736UL, 385 | |
| | | 2893217UL, 1059179497UL, 4053543778UL, 2557844172UL, 3282312002UL, 68255005 | |
| | | 8UL, 4281899173UL, 137171998UL, 3239159214UL, 2258610918UL, 426724741UL, 35 | |
| | | 02660993UL, 135977383UL, 429929363UL, 3984458137UL, 964026748UL, 2182019070 | |
| | | UL, 3836562946UL, 515026869UL, 359030455UL, 1301694917UL, 2300414803UL, 236 | |
| | | 4654981UL, 3804876710UL, 171119249UL, 2646785698UL, 4283509387UL, 362808776 | |
| | | 3UL, 1748227044UL, 3037141234UL, 3000413256UL, 23007314UL, 3598880509UL, 41 | |
| | | 60517314UL, 112205578UL, 1677675411UL, 734881643UL, 2830770338UL, 347031714 | |
| | | 5UL, 3306806569UL, 2635040943UL, 2671367560UL, 3528996498UL, 3878886478UL, | |
| | | 3114253828UL, 2721384408UL, 3175226991UL, 1393767271UL, 2651623266UL, 37679 | |
| | | 78376UL, 1269699398UL, 1100964192UL, 4169085845UL, 2086718107UL, 1286251099 | |
| | | UL, 764751784UL, 3006878591UL, }, | |
| | | }; | |
| | | | |
| | | static unsigned int precalc_xorwow_offset_matrix_host[8][800] = { | |
| | | {0UL, 0UL, 0UL, 0UL, 3UL, 0UL, 0UL, 0UL, 0UL, 6UL, 0UL, 0UL, 0UL, 0UL, 15UL | |
| | | , 0UL, 0UL, 0UL, 0UL, 30UL, 0UL, 0UL, 0UL, 0UL, 60UL, 0UL, 0UL, 0UL, 0UL, 1 | |
| | | 20UL, 0UL, 0UL, 0UL, 0UL, 240UL, 0UL, 0UL, 0UL, 0UL, 480UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 960UL, 0UL, 0UL, 0UL, 0UL, 1920UL, 0UL, 0UL, 0UL, 0UL, 3840UL, 0UL, 0U | |
| | | L, 0UL, 0UL, 7680UL, 0UL, 0UL, 0UL, 0UL, 15360UL, 0UL, 0UL, 0UL, 0UL, 30720 | |
| | | UL, 0UL, 0UL, 0UL, 0UL, 61440UL, 0UL, 0UL, 0UL, 0UL, 122880UL, 0UL, 0UL, 0U | |
| | | L, 0UL, 245760UL, 0UL, 0UL, 0UL, 0UL, 491520UL, 0UL, 0UL, 0UL, 0UL, 983040U | |
| | | L, 0UL, 0UL, 0UL, 0UL, 1966080UL, 0UL, 0UL, 0UL, 0UL, 3932160UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 7864320UL, 0UL, 0UL, 0UL, 0UL, 15728640UL, 0UL, 0UL, 0UL, 0UL, 31 | |
| | | 457280UL, 0UL, 0UL, 0UL, 0UL, 62914560UL, 0UL, 0UL, 0UL, 0UL, 125829120UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 251658240UL, 0UL, 0UL, 0UL, 0UL, 503316480UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 1006632960UL, 0UL, 0UL, 0UL, 0UL, 2013265920UL, 0UL, 0UL, 0UL, 0 | |
| | | UL, 4026531840UL, 0UL, 0UL, 0UL, 0UL, 3758096384UL, 1UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 2UL, 0UL, 0UL, 0UL, 0UL, 4UL, 0UL, 0UL, 0UL, 0UL, 8UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 16UL, 0UL, 0UL, 0UL, 0UL, 32UL, 0UL, 0UL, 0UL, 0UL, 64UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 128UL, 0UL, 0UL, 0UL, 0UL, 256UL, 0UL, 0UL, 0UL, 0UL, 512UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 1024UL, 0UL, 0UL, 0UL, 0UL, 2048UL, 0UL, 0UL, 0UL, 0UL, 4096UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 8192UL, 0UL, 0UL, 0UL, 0UL, 16384UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 32768UL, 0UL, 0UL, 0UL, 0UL, 65536UL, 0UL, 0UL, 0UL, 0UL, 131072UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 262144UL, 0UL, 0UL, 0UL, 0UL, 524288UL, 0UL, 0UL, 0UL, 0UL, | |
| | | 1048576UL, 0UL, 0UL, 0UL, 0UL, 2097152UL, 0UL, 0UL, 0UL, 0UL, 4194304UL, 0U | |
| | | L, 0UL, 0UL, 0UL, 8388608UL, 0UL, 0UL, 0UL, 0UL, 16777216UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 33554432UL, 0UL, 0UL, 0UL, 0UL, 67108864UL, 0UL, 0UL, 0UL, 0UL, 13421 | |
| | | 7728UL, 0UL, 0UL, 0UL, 0UL, 268435456UL, 0UL, 0UL, 0UL, 0UL, 536870912UL, 0 | |
| | | UL, 0UL, 0UL, 0UL, 1073741824UL, 0UL, 0UL, 0UL, 0UL, 2147483648UL, 0UL, 0UL | |
| | | , 0UL, 0UL, 0UL, 1UL, 0UL, 0UL, 0UL, 0UL, 2UL, 0UL, 0UL, 0UL, 0UL, 4UL, 0UL | |
| | | , 0UL, 0UL, 0UL, 8UL, 0UL, 0UL, 0UL, 0UL, 16UL, 0UL, 0UL, 0UL, 0UL, 32UL, 0 | |
| | | UL, 0UL, 0UL, 0UL, 64UL, 0UL, 0UL, 0UL, 0UL, 128UL, 0UL, 0UL, 0UL, 0UL, 256 | |
| | | UL, 0UL, 0UL, 0UL, 0UL, 512UL, 0UL, 0UL, 0UL, 0UL, 1024UL, 0UL, 0UL, 0UL, 0 | |
| | | UL, 2048UL, 0UL, 0UL, 0UL, 0UL, 4096UL, 0UL, 0UL, 0UL, 0UL, 8192UL, 0UL, 0U | |
| | | L, 0UL, 0UL, 16384UL, 0UL, 0UL, 0UL, 0UL, 32768UL, 0UL, 0UL, 0UL, 0UL, 6553 | |
| | | 6UL, 0UL, 0UL, 0UL, 0UL, 131072UL, 0UL, 0UL, 0UL, 0UL, 262144UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 524288UL, 0UL, 0UL, 0UL, 0UL, 1048576UL, 0UL, 0UL, 0UL, 0UL, 2097 | |
| | | 152UL, 0UL, 0UL, 0UL, 0UL, 4194304UL, 0UL, 0UL, 0UL, 0UL, 8388608UL, 0UL, 0 | |
| | | UL, 0UL, 0UL, 16777216UL, 0UL, 0UL, 0UL, 0UL, 33554432UL, 0UL, 0UL, 0UL, 0U | |
| | | L, 67108864UL, 0UL, 0UL, 0UL, 0UL, 134217728UL, 0UL, 0UL, 0UL, 0UL, 2684354 | |
| | | 56UL, 0UL, 0UL, 0UL, 0UL, 536870912UL, 0UL, 0UL, 0UL, 0UL, 1073741824UL, 0U | |
| | | L, 0UL, 0UL, 0UL, 2147483648UL, 0UL, 0UL, 0UL, 0UL, 0UL, 1UL, 0UL, 0UL, 0UL | |
| | | , 0UL, 2UL, 0UL, 0UL, 0UL, 0UL, 4UL, 0UL, 0UL, 0UL, 0UL, 8UL, 0UL, 0UL, 0UL | |
| | | , 0UL, 16UL, 0UL, 0UL, 0UL, 0UL, 32UL, 0UL, 0UL, 0UL, 0UL, 64UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 128UL, 0UL, 0UL, 0UL, 0UL, 256UL, 0UL, 0UL, 0UL, 0UL, 512UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 1024UL, 0UL, 0UL, 0UL, 0UL, 2048UL, 0UL, 0UL, 0UL, 0UL, 409 | |
| | | 6UL, 0UL, 0UL, 0UL, 0UL, 8192UL, 0UL, 0UL, 0UL, 0UL, 16384UL, 0UL, 0UL, 0UL | |
| | | , 0UL, 32768UL, 0UL, 0UL, 0UL, 0UL, 65536UL, 0UL, 0UL, 0UL, 0UL, 131072UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 262144UL, 0UL, 0UL, 0UL, 0UL, 524288UL, 0UL, 0UL, 0UL, | |
| | | 0UL, 1048576UL, 0UL, 0UL, 0UL, 0UL, 2097152UL, 0UL, 0UL, 0UL, 0UL, 4194304U | |
| | | L, 0UL, 0UL, 0UL, 0UL, 8388608UL, 0UL, 0UL, 0UL, 0UL, 16777216UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 33554432UL, 0UL, 0UL, 0UL, 0UL, 67108864UL, 0UL, 0UL, 0UL, 0UL, | |
| | | 134217728UL, 0UL, 0UL, 0UL, 0UL, 268435456UL, 0UL, 0UL, 0UL, 0UL, 536870912 | |
| | | UL, 0UL, 0UL, 0UL, 0UL, 1073741824UL, 0UL, 0UL, 0UL, 0UL, 2147483648UL, 0UL | |
| | | , 0UL, 0UL, 0UL, 0UL, 1UL, 17UL, 0UL, 0UL, 0UL, 2UL, 34UL, 0UL, 0UL, 0UL, 4 | |
| | | UL, 68UL, 0UL, 0UL, 0UL, 8UL, 136UL, 0UL, 0UL, 0UL, 16UL, 272UL, 0UL, 0UL, | |
| | | 0UL, 32UL, 544UL, 0UL, 0UL, 0UL, 64UL, 1088UL, 0UL, 0UL, 0UL, 128UL, 2176UL | |
| | | , 0UL, 0UL, 0UL, 256UL, 4352UL, 0UL, 0UL, 0UL, 512UL, 8704UL, 0UL, 0UL, 0UL | |
| | | , 1024UL, 17408UL, 0UL, 0UL, 0UL, 2048UL, 34816UL, 0UL, 0UL, 0UL, 4096UL, 6 | |
| | | 9632UL, 0UL, 0UL, 0UL, 8192UL, 139264UL, 0UL, 0UL, 0UL, 16384UL, 278528UL, | |
| | | 0UL, 0UL, 0UL, 32768UL, 557056UL, 0UL, 0UL, 0UL, 65536UL, 1114112UL, 0UL, 0 | |
| | | UL, 0UL, 131072UL, 2228224UL, 0UL, 0UL, 0UL, 262144UL, 4456448UL, 0UL, 0UL, | |
| | | 0UL, 524288UL, 8912896UL, 0UL, 0UL, 0UL, 1048576UL, 17825792UL, 0UL, 0UL, | |
| | | 0UL, 2097152UL, 35651584UL, 0UL, 0UL, 0UL, 4194304UL, 71303168UL, 0UL, 0UL, | |
| | | 0UL, 8388608UL, 142606336UL, 0UL, 0UL, 0UL, 16777216UL, 285212672UL, 0UL, | |
| | | 0UL, 0UL, 33554432UL, 570425344UL, 0UL, 0UL, 0UL, 67108864UL, 1140850688UL, | |
| | | 0UL, 0UL, 0UL, 134217728UL, 2281701376UL, 0UL, 0UL, 0UL, 268435456UL, 2684 | |
| | | 35456UL, 0UL, 0UL, 0UL, 536870912UL, 536870912UL, 0UL, 0UL, 0UL, 1073741824 | |
| | | UL, 1073741824UL, 0UL, 0UL, 0UL, 2147483648UL, 2147483648UL, }, {0UL, 3UL, | |
| | | 51UL, 771UL, 13107UL, 0UL, 6UL, 102UL, 1542UL, 26214UL, 0UL, 15UL, 255UL, 3 | |
| | | 855UL, 65535UL, 0UL, 30UL, 510UL, 7710UL, 131070UL, 0UL, 60UL, 1020UL, 1542 | |
| | | 0UL, 262140UL, 0UL, 120UL, 2040UL, 30840UL, 524280UL, 0UL, 240UL, 4080UL, 6 | |
| | | 1680UL, 1048560UL, 0UL, 480UL, 8160UL, 123360UL, 2097120UL, 0UL, 960UL, 163 | |
| | | 20UL, 246720UL, 4194240UL, 0UL, 1920UL, 32640UL, 493440UL, 8388480UL, 0UL, | |
| | | 3840UL, 65280UL, 986880UL, 16776960UL, 0UL, 7680UL, 130560UL, 1973760UL, 33 | |
| | | 553920UL, 0UL, 15360UL, 261120UL, 3947520UL, 67107840UL, 0UL, 30720UL, 5222 | |
| | | 40UL, 7895040UL, 134215680UL, 0UL, 61440UL, 1044480UL, 15790080UL, 26843136 | |
| | | 0UL, 0UL, 122880UL, 2088960UL, 31580160UL, 536862720UL, 0UL, 245760UL, 4177 | |
| | | 920UL, 63160320UL, 1073725440UL, 0UL, 491520UL, 8355840UL, 126320640UL, 214 | |
| | | 7450880UL, 0UL, 983040UL, 16711680UL, 252641280UL, 4294901760UL, 0UL, 19660 | |
| | | 80UL, 33423360UL, 505282560UL, 4294836224UL, 0UL, 3932160UL, 66846720UL, 10 | |
| | | 10565120UL, 4294705152UL, 0UL, 7864320UL, 133693440UL, 2021130240UL, 429444 | |
| | | 3008UL, 0UL, 15728640UL, 267386880UL, 4042260480UL, 4293918720UL, 0UL, 3145 | |
| | | 7280UL, 534773760UL, 3789553664UL, 4292870144UL, 0UL, 62914560UL, 106954752 | |
| | | 0UL, 3284140032UL, 4290772992UL, 0UL, 125829120UL, 2139095040UL, 2273312768 | |
| | | UL, 4286578688UL, 0UL, 251658240UL, 4278190080UL, 251658240UL, 4278190080UL | |
| | | , 0UL, 503316480UL, 4261412864UL, 503316480UL, 4261412864UL, 0UL, 100663296 | |
| | | 0UL, 4227858432UL, 1006632960UL, 4227858432UL, 0UL, 2013265920UL, 416074956 | |
| | | 8UL, 2013265920UL, 4160749568UL, 0UL, 4026531840UL, 4026531840UL, 402653184 | |
| | | 0UL, 4026531840UL, 0UL, 3758096384UL, 3758096384UL, 3758096384UL, 375809638 | |
| | | 4UL, 0UL, 0UL, 3UL, 51UL, 771UL, 0UL, 0UL, 6UL, 102UL, 1542UL, 0UL, 0UL, 15 | |
| | | UL, 255UL, 3855UL, 0UL, 0UL, 30UL, 510UL, 7710UL, 0UL, 0UL, 60UL, 1020UL, 1 | |
| | | 5420UL, 0UL, 0UL, 120UL, 2040UL, 30840UL, 0UL, 0UL, 240UL, 4080UL, 61680UL, | |
| | | 0UL, 0UL, 480UL, 8160UL, 123360UL, 0UL, 0UL, 960UL, 16320UL, 246720UL, 0UL | |
| | | , 0UL, 1920UL, 32640UL, 493440UL, 0UL, 0UL, 3840UL, 65280UL, 986880UL, 0UL, | |
| | | 0UL, 7680UL, 130560UL, 1973760UL, 0UL, 0UL, 15360UL, 261120UL, 3947520UL, | |
| | | 0UL, 0UL, 30720UL, 522240UL, 7895040UL, 0UL, 0UL, 61440UL, 1044480UL, 15790 | |
| | | 080UL, 0UL, 0UL, 122880UL, 2088960UL, 31580160UL, 0UL, 0UL, 245760UL, 41779 | |
| | | 20UL, 63160320UL, 0UL, 0UL, 491520UL, 8355840UL, 126320640UL, 0UL, 0UL, 983 | |
| | | 040UL, 16711680UL, 252641280UL, 0UL, 0UL, 1966080UL, 33423360UL, 505282560U | |
| | | L, 0UL, 0UL, 3932160UL, 66846720UL, 1010565120UL, 0UL, 0UL, 7864320UL, 1336 | |
| | | 93440UL, 2021130240UL, 0UL, 0UL, 15728640UL, 267386880UL, 4042260480UL, 0UL | |
| | | , 0UL, 31457280UL, 534773760UL, 3789553664UL, 0UL, 0UL, 62914560UL, 1069547 | |
| | | 520UL, 3284140032UL, 0UL, 0UL, 125829120UL, 2139095040UL, 2273312768UL, 0UL | |
| | | , 0UL, 251658240UL, 4278190080UL, 251658240UL, 0UL, 0UL, 503316480UL, 42614 | |
| | | 12864UL, 503316480UL, 0UL, 0UL, 1006632960UL, 4227858432UL, 1006632960UL, 0 | |
| | | UL, 0UL, 2013265920UL, 4160749568UL, 2013265920UL, 0UL, 0UL, 4026531840UL, | |
| | | 4026531840UL, 4026531840UL, 0UL, 0UL, 3758096384UL, 3758096384UL, 375809638 | |
| | | 4UL, 0UL, 0UL, 0UL, 3UL, 51UL, 0UL, 0UL, 0UL, 6UL, 102UL, 0UL, 0UL, 0UL, 15 | |
| | | UL, 255UL, 0UL, 0UL, 0UL, 30UL, 510UL, 0UL, 0UL, 0UL, 60UL, 1020UL, 0UL, 0U | |
| | | L, 0UL, 120UL, 2040UL, 0UL, 0UL, 0UL, 240UL, 4080UL, 0UL, 0UL, 0UL, 480UL, | |
| | | 8160UL, 0UL, 0UL, 0UL, 960UL, 16320UL, 0UL, 0UL, 0UL, 1920UL, 32640UL, 0UL, | |
| | | 0UL, 0UL, 3840UL, 65280UL, 0UL, 0UL, 0UL, 7680UL, 130560UL, 0UL, 0UL, 0UL, | |
| | | 15360UL, 261120UL, 0UL, 0UL, 0UL, 30720UL, 522240UL, 0UL, 0UL, 0UL, 61440U | |
| | | L, 1044480UL, 0UL, 0UL, 0UL, 122880UL, 2088960UL, 0UL, 0UL, 0UL, 245760UL, | |
| | | 4177920UL, 0UL, 0UL, 0UL, 491520UL, 8355840UL, 0UL, 0UL, 0UL, 983040UL, 167 | |
| | | 11680UL, 0UL, 0UL, 0UL, 1966080UL, 33423360UL, 0UL, 0UL, 0UL, 3932160UL, 66 | |
| | | 846720UL, 0UL, 0UL, 0UL, 7864320UL, 133693440UL, 0UL, 0UL, 0UL, 15728640UL, | |
| | | 267386880UL, 0UL, 0UL, 0UL, 31457280UL, 534773760UL, 0UL, 0UL, 0UL, 629145 | |
| | | 60UL, 1069547520UL, 0UL, 0UL, 0UL, 125829120UL, 2139095040UL, 0UL, 0UL, 0UL | |
| | | , 251658240UL, 4278190080UL, 0UL, 0UL, 0UL, 503316480UL, 4261412864UL, 0UL, | |
| | | 0UL, 0UL, 1006632960UL, 4227858432UL, 0UL, 0UL, 0UL, 2013265920UL, 4160749 | |
| | | 568UL, 0UL, 0UL, 0UL, 4026531840UL, 4026531840UL, 0UL, 0UL, 0UL, 3758096384 | |
| | | UL, 3758096384UL, 0UL, 0UL, 0UL, 0UL, 3UL, 0UL, 0UL, 0UL, 0UL, 6UL, 0UL, 0U | |
| | | L, 0UL, 0UL, 15UL, 0UL, 0UL, 0UL, 0UL, 30UL, 0UL, 0UL, 0UL, 0UL, 60UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 120UL, 0UL, 0UL, 0UL, 0UL, 240UL, 0UL, 0UL, 0UL, 0UL, 480UL | |
| | | , 0UL, 0UL, 0UL, 0UL, 960UL, 0UL, 0UL, 0UL, 0UL, 1920UL, 0UL, 0UL, 0UL, 0UL | |
| | | , 3840UL, 0UL, 0UL, 0UL, 0UL, 7680UL, 0UL, 0UL, 0UL, 0UL, 15360UL, 0UL, 0UL | |
| | | , 0UL, 0UL, 30720UL, 0UL, 0UL, 0UL, 0UL, 61440UL, 0UL, 0UL, 0UL, 0UL, 12288 | |
| | | 0UL, 0UL, 0UL, 0UL, 0UL, 245760UL, 0UL, 0UL, 0UL, 0UL, 491520UL, 0UL, 0UL, | |
| | | 0UL, 0UL, 983040UL, 0UL, 0UL, 0UL, 0UL, 1966080UL, 0UL, 0UL, 0UL, 0UL, 3932 | |
| | | 160UL, 0UL, 0UL, 0UL, 0UL, 7864320UL, 0UL, 0UL, 0UL, 0UL, 15728640UL, 0UL, | |
| | | 0UL, 0UL, 0UL, 31457280UL, 0UL, 0UL, 0UL, 0UL, 62914560UL, 0UL, 0UL, 0UL, 0 | |
| | | UL, 125829120UL, 0UL, 0UL, 0UL, 0UL, 251658240UL, 0UL, 0UL, 0UL, 0UL, 50331 | |
| | | 6480UL, 0UL, 0UL, 0UL, 0UL, 1006632960UL, 0UL, 0UL, 0UL, 0UL, 2013265920UL, | |
| | | 0UL, 0UL, 0UL, 0UL, 4026531840UL, 0UL, 0UL, 0UL, 0UL, 3758096384UL, 1UL, 1 | |
| | | 7UL, 257UL, 4369UL, 65537UL, 2UL, 34UL, 514UL, 8738UL, 131074UL, 4UL, 68UL, | |
| | | 1028UL, 17476UL, 262148UL, 8UL, 136UL, 2056UL, 34952UL, 524296UL, 16UL, 27 | |
| | | 2UL, 4112UL, 69904UL, 1048592UL, 32UL, 544UL, 8224UL, 139808UL, 2097184UL, | |
| | | 64UL, 1088UL, 16448UL, 279616UL, 4194368UL, 128UL, 2176UL, 32896UL, 559232U | |
| | | L, 8388736UL, 256UL, 4352UL, 65792UL, 1118464UL, 16777472UL, 512UL, 8704UL, | |
| | | 131584UL, 2236928UL, 33554944UL, 1024UL, 17408UL, 263168UL, 4473856UL, 671 | |
| | | 09888UL, 2048UL, 34816UL, 526336UL, 8947712UL, 134219776UL, 4096UL, 69632UL | |
| | | , 1052672UL, 17895424UL, 268439552UL, 8192UL, 139264UL, 2105344UL, 35790848 | |
| | | UL, 536879104UL, 16384UL, 278528UL, 4210688UL, 71581696UL, 1073758208UL, 32 | |
| | | 768UL, 557056UL, 8421376UL, 143163392UL, 2147516416UL, 65536UL, 1114112UL, | |
| | | 16842752UL, 286326784UL, 65536UL, 131072UL, 2228224UL, 33685504UL, 57265356 | |
| | | 8UL, 131072UL, 262144UL, 4456448UL, 67371008UL, 1145307136UL, 262144UL, 524 | |
| | | 288UL, 8912896UL, 134742016UL, 2290614272UL, 524288UL, 1048576UL, 17825792U | |
| | | L, 269484032UL, 286261248UL, 1048576UL, 2097152UL, 35651584UL, 538968064UL, | |
| | | 572522496UL, 2097152UL, 4194304UL, 71303168UL, 1077936128UL, 1145044992UL, | |
| | | 4194304UL, 8388608UL, 142606336UL, 2155872256UL, 2290089984UL, 8388608UL, | |
| | | 16777216UL, 285212672UL, 16777216UL, 285212672UL, 16777216UL, 33554432UL, 5 | |
| | | 70425344UL, 33554432UL, 570425344UL, 33554432UL, 67108864UL, 1140850688UL, | |
| | | 67108864UL, 1140850688UL, 67108864UL, 134217728UL, 2281701376UL, 134217728U | |
| | | L, 2281701376UL, 134217728UL, 268435456UL, 268435456UL, 268435456UL, 268435 | |
| | | 456UL, 268435456UL, 536870912UL, 536870912UL, 536870912UL, 536870912UL, 536 | |
| | | 870912UL, 1073741824UL, 1073741824UL, 1073741824UL, 1073741824UL, 107374182 | |
| | | 4UL, 2147483648UL, 2147483648UL, 2147483648UL, 2147483648UL, 2147483648UL, | |
| | | }, {85009117UL, 335741939UL, 1412632518UL, 386859243UL, 1741437244UL, 15213 | |
| | | 9416UL, 403047142UL, 2556825231UL, 505087203UL, 4287193174UL, 335609039UL, | |
| | | 336528191UL, 1425998811UL, 456920088UL, 2832198590UL, 724748988UL, 36258456 | |
| | | 30UL, 1509824181UL, 3330088197UL, 2710488401UL, 1431742057UL, 1077674236UL, | |
| | | 1140592489UL, 2096905276UL, 3007294393UL, 2863484114UL, 1081606648UL, 1207 | |
| | | 443154UL, 972585080UL, 2793363314UL, 1432000919UL, 1089470704UL, 1341132452 | |
| | | UL, 3019109363UL, 2362285522UL, 1790260014UL, 2178941408UL, 2682264904UL, 1 | |
| | | 743251430UL, 429603751UL, 359294556UL, 62915520UL, 1069562512UL, 3486502860 | |
| | | UL, 859207501UL, 3939814584UL, 125831040UL, 2139125024UL, 2678038424UL, 171 | |
| | | 8415002UL, 363436400UL, 251662080UL, 4278250048UL, 1061109552UL, 3436830004 | |
| | | UL, 3948098272UL, 503324160UL, 4261532800UL, 2122219104UL, 2310257256UL, 38 | |
| | | 0003776UL, 1006648320UL, 4228098304UL, 4244438208UL, 3278337232UL, 39812330 | |
| | | 24UL, 2013296640UL, 4161229312UL, 4193909120UL, 2530142624UL, 446273280UL, | |
| | | 4026593280UL, 4027491328UL, 871625472UL, 4254978880UL, 4113772032UL, 375821 | |
| | | 9264UL, 3760015360UL, 2011686400UL, 3946555008UL, 711351296UL, 3221471232UL | |
| | | , 3225063424UL, 4291808256UL, 108481792UL, 2496444416UL, 2147975168UL, 2155 | |
| | | 159552UL, 4020213760UL, 485399040UL, 3919147008UL, 983040UL, 15351808UL, 25 | |
| | | 5799296UL, 3923588096UL, 322101248UL, 1966080UL, 299139072UL, 511598592UL, | |
| | | 3283773440UL, 3865427968UL, 3932160UL, 4087939072UL, 1023197184UL, 14672732 | |
| | | 16UL, 214663168UL, 7864320UL, 4149346304UL, 2046394368UL, 3202981888UL, 365 | |
| | | 0551808UL, 3236954112UL, 1050935296UL, 871563264UL, 2916302848UL, 193239449 | |
| | | 6UL, 2447376384UL, 1833435136UL, 2011561984UL, 2342944768UL, 643563520UL, 8 | |
| | | 68220928UL, 177209344UL, 4291559424UL, 122486784UL, 2360868864UL, 200487731 | |
| | | 2UL, 85983232UL, 4019716096UL, 3734634496UL, 3647995904UL, 1056964608UL, 36 | |
| | | 61627392UL, 254803968UL, 2905866240UL, 1658847232UL, 2113929216UL, 30282874 | |
| | | 88UL, 3730833408UL, 2322071552UL, 3586129920UL, 4227858432UL, 1761607680UL, | |
| | | 2092957696UL, 80740352UL, 2071986176UL, 4160749568UL, 3523215360UL, 964689 | |
| | | 920UL, 429916160UL, 3875536896UL, 4026531840UL, 2751463424UL, 1929379840UL, | |
| | | 4081057792UL, 503316480UL, 3758096384UL, 2281701376UL, 4127195136UL, 33973 | |
| | | 86240UL, 1316635UL, 85009117UL, 335741939UL, 1412632518UL, 386859243UL, 158 | |
| | | 0547UL, 152139416UL, 403047142UL, 2556825231UL, 505087203UL, 1317672UL, 335 | |
| | | 609039UL, 336528191UL, 1425998811UL, 456920088UL, 1574501UL, 724748988UL, 3 | |
| | | 625845630UL, 1509824181UL, 3330088197UL, 15612UL, 1431742057UL, 1077674236U | |
| | | L, 1140592489UL, 2096905276UL, 31224UL, 2863484114UL, 1081606648UL, 1207443 | |
| | | 154UL, 972585080UL, 62451UL, 1432000919UL, 1089470704UL, 1341132452UL, 3019 | |
| | | 109363UL, 124902UL, 1790260014UL, 2178941408UL, 2682264904UL, 1743251430UL, | |
| | | 249804UL, 359294556UL, 62915520UL, 1069562512UL, 3486502860UL, 499608UL, 3 | |
| | | 939814584UL, 125831040UL, 2139125024UL, 2678038424UL, 999216UL, 363436400UL | |
| | | , 251662080UL, 4278250048UL, 1061109552UL, 3223223904UL, 3948098272UL, 5033 | |
| | | 24160UL, 4261532800UL, 2122219104UL, 1077738688UL, 380003776UL, 1006648320U | |
| | | L, 4228098304UL, 4244438208UL, 1081735552UL, 3981233024UL, 2013296640UL, 41 | |
| | | 61229312UL, 4193909120UL, 1089729280UL, 446273280UL, 4026593280UL, 40274913 | |
| | | 28UL, 871625472UL, 2179458560UL, 4113772032UL, 3758219264UL, 3760015360UL, | |
| | | 2011686400UL, 63949824UL, 711351296UL, 3221471232UL, 3225063424UL, 42918082 | |
| | | 56UL, 127899648UL, 2496444416UL, 2147975168UL, 2155159552UL, 4020213760UL, | |
| | | 255799296UL, 3919147008UL, 983040UL, 15351808UL, 255799296UL, 3732824064UL, | |
| | | 322101248UL, 1966080UL, 299139072UL, 511598592UL, 2096939008UL, 3865427968 | |
| | | UL, 3932160UL, 4087939072UL, 1023197184UL, 972652544UL, 214663168UL, 786432 | |
| | | 0UL, 4149346304UL, 2046394368UL, 3019046912UL, 3650551808UL, 3236954112UL, | |
| | | 1050935296UL, 871563264UL, 1743126528UL, 1932394496UL, 2447376384UL, 183343 | |
| | | 5136UL, 2011561984UL, 3486253056UL, 643563520UL, 868220928UL, 177209344UL, | |
| | | 4291559424UL, 2677538816UL, 2360868864UL, 2004877312UL, 85983232UL, 4019716 | |
| | | 096UL, 1060110336UL, 3647995904UL, 1056964608UL, 3661627392UL, 254803968UL, | |
| | | 3193962496UL, 1658847232UL, 2113929216UL, 3028287488UL, 3730833408UL, 3166 | |
| | | 699520UL, 3586129920UL, 4227858432UL, 1761607680UL, 2092957696UL, 311217356 | |
| | | 8UL, 2071986176UL, 4160749568UL, 3523215360UL, 964689920UL, 1929379840UL, 3 | |
| | | 875536896UL, 4026531840UL, 2751463424UL, 1929379840UL, 4127195136UL, 503316 | |
| | | 480UL, 3758096384UL, 2281701376UL, 4127195136UL, 332854UL, 1316635UL, 85009 | |
| | | 117UL, 335741939UL, 1412632518UL, 596079UL, 1580547UL, 152139416UL, 4030471 | |
| | | 42UL, 2556825231UL, 1316075UL, 1317672UL, 335609039UL, 336528191UL, 1425998 | |
| | | 811UL, 2824661UL, 1574501UL, 724748988UL, 3625845630UL, 1509824181UL, 55714 | |
| | | 97UL, 15612UL, 1431742057UL, 1077674236UL, 1140592489UL, 11142994UL, 31224U | |
| | | L, 2863484114UL, 1081606648UL, 1207443154UL, 22285988UL, 62451UL, 143200091 | |
| | | 9UL, 1089470704UL, 1341132452UL, 44571976UL, 124902UL, 1790260014UL, 217894 | |
| | | 1408UL, 2682264904UL, 89143952UL, 249804UL, 359294556UL, 62915520UL, 106956 | |
| | | 2512UL, 178287904UL, 499608UL, 3939814584UL, 125831040UL, 2139125024UL, 356 | |
| | | 575808UL, 999216UL, 363436400UL, 251662080UL, 4278250048UL, 713151616UL, 32 | |
| | | 23223904UL, 3948098272UL, 503324160UL, 4261532800UL, 1426303232UL, 10777386 | |
| | | 88UL, 380003776UL, 1006648320UL, 4228098304UL, 2852606464UL, 1081735552UL, | |
| | | 3981233024UL, 2013296640UL, 4161229312UL, 1410245632UL, 1089729280UL, 44627 | |
| | | 3280UL, 4026593280UL, 4027491328UL, 1746749440UL, 2179458560UL, 4113772032U | |
| | | L, 3758219264UL, 3760015360UL, 272273408UL, 63949824UL, 711351296UL, 322147 | |
| | | 1232UL, 3225063424UL, 3765772288UL, 127899648UL, 2496444416UL, 2147975168UL | |
| | | , 2155159552UL, 15351808UL, 255799296UL, 3919147008UL, 983040UL, 15351808UL | |
| | | , 3251929088UL, 3732824064UL, 322101248UL, 1966080UL, 299139072UL, 11351490 | |
| | | 56UL, 2096939008UL, 3865427968UL, 3932160UL, 4087939072UL, 1196556288UL, 97 | |
| | | 2652544UL, 214663168UL, 7864320UL, 4149346304UL, 1319370752UL, 3019046912UL | |
| | | , 3650551808UL, 3236954112UL, 1050935296UL, 2638741504UL, 1743126528UL, 193 | |
| | | 2394496UL, 2447376384UL, 1833435136UL, 982515712UL, 3486253056UL, 643563520 | |
| | | UL, 868220928UL, 177209344UL, 1965031424UL, 2677538816UL, 2360868864UL, 200 | |
| | | 4877312UL, 85983232UL, 3930062848UL, 1060110336UL, 3647995904UL, 1056964608 | |
| | | UL, 3661627392UL, 3565158400UL, 3193962496UL, 1658847232UL, 2113929216UL, 3 | |
| | | 028287488UL, 2835349504UL, 3166699520UL, 3586129920UL, 4227858432UL, 176160 | |
| | | 7680UL, 1375731712UL, 3112173568UL, 2071986176UL, 4160749568UL, 3523215360U | |
| | | L, 2751463424UL, 1929379840UL, 3875536896UL, 4026531840UL, 2751463424UL, 22 | |
| | | 81701376UL, 4127195136UL, 503316480UL, 3758096384UL, 2281701376UL, 5123UL, | |
| | | 332854UL, 1316635UL, 85009117UL, 335741939UL, 6150UL, 596079UL, 1580547UL, | |
| | | 152139416UL, 403047142UL, 5135UL, 1316075UL, 1317672UL, 335609039UL, 336528 | |
| | | 191UL, 6174UL, 2824661UL, 1574501UL, 724748988UL, 3625845630UL, 60UL, 55714 | |
| | | 97UL, 15612UL, 1431742057UL, 1077674236UL, 120UL, 11142994UL, 31224UL, 2863 | |
| | | 484114UL, 1081606648UL, 240UL, 22285988UL, 62451UL, 1432000919UL, 108947070 | |
| | | 4UL, 480UL, 44571976UL, 124902UL, 1790260014UL, 2178941408UL, 960UL, 891439 | |
| | | 52UL, 249804UL, 359294556UL, 62915520UL, 1920UL, 178287904UL, 499608UL, 393 | |
| | | 9814584UL, 125831040UL, 3840UL, 356575808UL, 999216UL, 363436400UL, 2516620 | |
| | | 80UL, 7680UL, 713151616UL, 3223223904UL, 3948098272UL, 503324160UL, 15360UL | |
| | | , 1426303232UL, 1077738688UL, 380003776UL, 1006648320UL, 30720UL, 285260646 | |
| | | 4UL, 1081735552UL, 3981233024UL, 2013296640UL, 61440UL, 1410245632UL, 10897 | |
| | | 29280UL, 446273280UL, 4026593280UL, 122880UL, 1746749440UL, 2179458560UL, 4 | |
| | | 113772032UL, 3758219264UL, 245760UL, 272273408UL, 63949824UL, 711351296UL, | |
| | | 3221471232UL, 491520UL, 3765772288UL, 127899648UL, 2496444416UL, 2147975168 | |
| | | UL, 983040UL, 15351808UL, 255799296UL, 3919147008UL, 983040UL, 3223191552UL | |
| | | , 3251929088UL, 3732824064UL, 322101248UL, 1966080UL, 1077673984UL, 1135149 | |
| | | 056UL, 2096939008UL, 3865427968UL, 3932160UL, 1081606144UL, 1196556288UL, 9 | |
| | | 72652544UL, 214663168UL, 7864320UL, 1089470464UL, 1319370752UL, 3019046912U | |
| | | L, 3650551808UL, 3236954112UL, 2178940928UL, 2638741504UL, 1743126528UL, 19 | |
| | | 32394496UL, 2447376384UL, 62914560UL, 982515712UL, 3486253056UL, 643563520U | |
| | | L, 868220928UL, 125829120UL, 1965031424UL, 2677538816UL, 2360868864UL, 2004 | |
| | | 877312UL, 251658240UL, 3930062848UL, 1060110336UL, 3647995904UL, 1056964608 | |
| | | UL, 503316480UL, 3565158400UL, 3193962496UL, 1658847232UL, 2113929216UL, 10 | |
| | | 06632960UL, 2835349504UL, 3166699520UL, 3586129920UL, 4227858432UL, 2013265 | |
| | | 920UL, 1375731712UL, 3112173568UL, 2071986176UL, 4160749568UL, 4026531840UL | |
| | | , 2751463424UL, 1929379840UL, 3875536896UL, 4026531840UL, 3758096384UL, 228 | |
| | | 1701376UL, 4127195136UL, 503316480UL, 3758096384UL, 201392209UL, 3423671362 | |
| | | UL, 218366296UL, 3713336838UL, 206572594UL, 402785186UL, 2552372100UL, 4369 | |
| | | 28947UL, 3130605370UL, 463476848UL, 262468UL, 4461835UL, 68158800UL, 115870 | |
| | | 0908UL, 20971524UL, 524680UL, 8919318UL, 136513955UL, 2316537326UL, 2516585 | |
| | | 2UL, 3222274064UL, 3239051564UL, 3494187077UL, 3558090985UL, 3221225500UL, | |
| | | 2149580832UL, 2183135832UL, 2693406858UL, 2821214674UL, 2147483704UL, 41943 | |
| | | 68UL, 71304368UL, 1091846420UL, 1347462055UL, 64UL, 8388736UL, 142608736UL, | |
| | | 2183692840UL, 2694924110UL, 3221225600UL, 16777472UL, 285217472UL, 7241838 | |
| | | 4UL, 1094880924UL, 1342177536UL, 33554944UL, 570434944UL, 144836768UL, 2189 | |
| | | 761848UL, 2684355072UL, 67109888UL, 1140869888UL, 289673536UL, 84556400UL, | |
| | | 1073742848UL, 134219776UL, 2281739776UL, 579347072UL, 169112800UL, 21474856 | |
| | | 96UL, 268439552UL, 268512256UL, 1158694144UL, 69790144UL, 4096UL, 536879104 | |
| | | UL, 537024512UL, 2317388288UL, 3360805760UL, 8192UL, 1073758208UL, 10740490 | |
| | | 24UL, 339809280UL, 1352902400UL, 16384UL, 2147516416UL, 2148098048UL, 39008 | |
| | | 44032UL, 1632062976UL, 32768UL, 65536UL, 1228800UL, 17059840UL, 311335936UL | |
| | | , 65536UL, 131072UL, 2457600UL, 34119680UL, 622671872UL, 131072UL, 262144UL | |
| | | , 4915200UL, 68239360UL, 1245343744UL, 262144UL, 524288UL, 9830400UL, 13647 | |
| | | 8720UL, 2490687488UL, 524288UL, 1048576UL, 288096256UL, 272957440UL, 954843 | |
| | | 136UL, 3222274048UL, 2097152UL, 3797417984UL, 545914880UL, 2983428096UL, 21 | |
| | | 49580800UL, 4194304UL, 78643200UL, 1091829760UL, 2745630720UL, 4194304UL, 3 | |
| | | 229614080UL, 3378511872UL, 1109917696UL, 2270035968UL, 8388608UL, 135895449 | |
| | | 6UL, 1119879168UL, 1414529024UL, 513540096UL, 16777216UL, 2717908992UL, 223 | |
| | | 9758336UL, 2829058048UL, 1027080192UL, 33554432UL, 1140850688UL, 184549376U | |
| | | L, 1363148800UL, 2054160384UL, 3288334336UL, 2281701376UL, 369098752UL, 272 | |
| | | 6297600UL, 4108320768UL, 2281701376UL, 268435456UL, 738197504UL, 2231369728 | |
| | | UL, 968884224UL, 3959422976UL, 536870912UL, 1476395008UL, 167772160UL, 3011 | |
| | | 510272UL, 3355443200UL, 1073741824UL, 2952790016UL, 335544320UL, 1728053248 | |
| | | UL, 2147483648UL, 2147483648UL, 1610612736UL, 3892314112UL, 503316480UL, 0U | |
| | | L, }, {1939838472UL, 1412147404UL, 166205219UL, 1757484276UL, 2905930693UL, | |
| | | 2345662040UL, 2845657161UL, 253454719UL, 2661974169UL, 303781080UL, 407533 | |
| | | 1504UL, 31014156UL, 244538930UL, 3752264221UL, 992575155UL, 219309525UL, 24 | |
| | | 6620060UL, 215640989UL, 4125020723UL, 2016731730UL, 3236558869UL, 297169276 | |
| | | UL, 3293566751UL, 1867504216UL, 210423272UL, 2531663658UL, 499723753UL, 173 | |
| | | 0625896UL, 189236880UL, 3388575408UL, 2433358422UL, 1368961148UL, 313409684 | |
| | | 8UL, 2827836415UL, 3888822753UL, 4172043647UL, 3379360748UL, 2651760955UL, | |
| | | 1345081091UL, 627692776UL, 189423917UL, 1927379456UL, 4004336944UL, 2995932 | |
| | | 065UL, 1882016234UL, 2551113616UL, 1576396048UL, 1299792730UL, 2151240795UL | |
| | | , 2154814108UL, 4292139924UL, 3555849728UL, 943986992UL, 3169912733UL, 2631 | |
| | | 635779UL, 3478094562UL, 1285558544UL, 3716074330UL, 2780749859UL, 391110651 | |
| | | 0UL, 4175656994UL, 1731832828UL, 1275401375UL, 937322456UL, 3802094750UL, 1 | |
| | | 145506936UL, 1008905193UL, 1718801768UL, 645739137UL, 1356219146UL, 8278868 | |
| | | 16UL, 1722154800UL, 2242776733UL, 754630810UL, 772070504UL, 249481170UL, 26 | |
| | | 08123425UL, 2087201889UL, 3200968096UL, 3292110026UL, 841433255UL, 47754342 | |
| | | 7UL, 1878882709UL, 705347364UL, 4003860146UL, 3194913138UL, 2616490007UL, 3 | |
| | | 57561212UL, 2446098297UL, 2955680594UL, 2512991743UL, 637464579UL, 12091324 | |
| | | 55UL, 1341312804UL, 612108672UL, 2455017713UL, 1749147666UL, 4020226825UL, | |
| | | 2873924220UL, 499405095UL, 1837614076UL, 1227604028UL, 714577577UL, 1659502 | |
| | | 08UL, 442290261UL, 489077752UL, 216760440UL, 42151250UL, 426862080UL, 28102 | |
| | | 42474UL, 4112075489UL, 3514761468UL, 4101921371UL, 982512636UL, 500792667UL | |
| | | , 4286077681UL, 198050301UL, 1858712743UL, 2913642493UL, 3547545255UL, 3981 | |
| | | 929169UL, 2944140287UL, 2286578015UL, 3422343167UL, 1239123295UL, 202636739 | |
| | | 4UL, 3269986302UL, 3028402878UL, 2709637886UL, 1096011710UL, 294584132UL, 3 | |
| | | 086749695UL, 3324400975UL, 1164394495UL, 4290155855UL, 543687304UL, 4008517 | |
| | | 630UL, 836370334UL, 1876426750UL, 2362048414UL, 3578325264UL, 3221487612UL, | |
| | | 2671154748UL, 3395518460UL, 2018383420UL, 2131029536UL, 2165829624UL, 6976 | |
| | | 61816UL, 1336049656UL, 3309365624UL, 4259639360UL, 3423548400UL, 2416417776 | |
| | | UL, 1633698800UL, 1630071792UL, 41950336UL, 3423478496UL, 2885608160UL, 394 | |
| | | 3744224UL, 677380832UL, 4179285363UL, 1939838472UL, 1412147404UL, 166205219 | |
| | | UL, 1757484276UL, 3838244595UL, 2345662040UL, 2845657161UL, 253454719UL, 26 | |
| | | 61974169UL, 138737288UL, 4075331504UL, 31014156UL, 244538930UL, 3752264221U | |
| | | L, 1503392345UL, 219309525UL, 246620060UL, 215640989UL, 4125020723UL, 17594 | |
| | | 81152UL, 3236558869UL, 297169276UL, 3293566751UL, 1867504216UL, 3898070400U | |
| | | L, 2531663658UL, 499723753UL, 1730625896UL, 189236880UL, 2610231010UL, 2433 | |
| | | 358422UL, 1368961148UL, 3134096848UL, 2827836415UL, 3903474593UL, 417204364 | |
| | | 7UL, 3379360748UL, 2651760955UL, 1345081091UL, 1267864331UL, 189423917UL, 1 | |
| | | 927379456UL, 4004336944UL, 2995932065UL, 3452816347UL, 2551113616UL, 157639 | |
| | | 6048UL, 1299792730UL, 2151240795UL, 1222520631UL, 4292139924UL, 3555849728U | |
| | | L, 943986992UL, 3169912733UL, 3260130211UL, 3478094562UL, 1285558544UL, 371 | |
| | | 6074330UL, 2780749859UL, 3039362306UL, 4175656994UL, 1731832828UL, 12754013 | |
| | | 75UL, 937322456UL, 3236754932UL, 1145506936UL, 1008905193UL, 1718801768UL, | |
| | | 645739137UL, 1358079399UL, 827886816UL, 1722154800UL, 2242776733UL, 7546308 | |
| | | 10UL, 1748663943UL, 249481170UL, 2608123425UL, 2087201889UL, 3200968096UL, | |
| | | 698076610UL, 841433255UL, 477543427UL, 1878882709UL, 705347364UL, 369279499 | |
| | | 6UL, 3194913138UL, 2616490007UL, 357561212UL, 2446098297UL, 2771068186UL, 2 | |
| | | 512991743UL, 637464579UL, 1209132455UL, 1341312804UL, 27937268UL, 245501771 | |
| | | 3UL, 1749147666UL, 4020226825UL, 2873924220UL, 1673040956UL, 1837614076UL, | |
| | | 1227604028UL, 714577577UL, 165950208UL, 528340088UL, 489077752UL, 216760440 | |
| | | UL, 42151250UL, 426862080UL, 1646215396UL, 4112075489UL, 3514761468UL, 4101 | |
| | | 921371UL, 982512636UL, 2095821304UL, 4286077681UL, 198050301UL, 1858712743U | |
| | | L, 2913642493UL, 277300160UL, 3981929169UL, 2944140287UL, 2286578015UL, 342 | |
| | | 2343167UL, 1178044288UL, 2026367394UL, 3269986302UL, 3028402878UL, 27096378 | |
| | | 86UL, 2234191616UL, 294584132UL, 3086749695UL, 3324400975UL, 1164394495UL, | |
| | | 136978944UL, 543687304UL, 4008517630UL, 836370334UL, 1876426750UL, 32752537 | |
| | | 60UL, 3578325264UL, 3221487612UL, 2671154748UL, 3395518460UL, 3942394880UL, | |
| | | 2131029536UL, 2165829624UL, 697661816UL, 1336049656UL, 3265045504UL, 42596 | |
| | | 39360UL, 3423548400UL, 2416417776UL, 1633698800UL, 3943712768UL, 41950336UL | |
| | | , 3423478496UL, 2885608160UL, 3943744224UL, 2293593009UL, 4179285363UL, 193 | |
| | | 9838472UL, 1412147404UL, 166205219UL, 715714152UL, 3838244595UL, 2345662040 | |
| | | UL, 2845657161UL, 253454719UL, 3758048260UL, 138737288UL, 4075331504UL, 310 | |
| | | 14156UL, 244538930UL, 370671650UL, 1503392345UL, 219309525UL, 246620060UL, | |
| | | 215640989UL, 2219162331UL, 1759481152UL, 3236558869UL, 297169276UL, 3293566 | |
| | | 751UL, 135243402UL, 3898070400UL, 2531663658UL, 499723753UL, 1730625896UL, | |
| | | 3142293713UL, 2610231010UL, 2433358422UL, 1368961148UL, 3134096848UL, 48694 | |
| | | 9791UL, 3903474593UL, 4172043647UL, 3379360748UL, 2651760955UL, 3172880550U | |
| | | L, 1267864331UL, 189423917UL, 1927379456UL, 4004336944UL, 191463910UL, 3452 | |
| | | 816347UL, 2551113616UL, 1576396048UL, 1299792730UL, 4411574UL, 1222520631UL | |
| | | , 4292139924UL, 3555849728UL, 943986992UL, 3073348038UL, 3260130211UL, 3478 | |
| | | 094562UL, 1285558544UL, 3716074330UL, 3098363790UL, 3039362306UL, 417565699 | |
| | | 4UL, 1731832828UL, 1275401375UL, 468159532UL, 3236754932UL, 1145506936UL, 1 | |
| | | 008905193UL, 1718801768UL, 1092964081UL, 1358079399UL, 827886816UL, 1722154 | |
| | | 800UL, 2242776733UL, 53128947UL, 1748663943UL, 249481170UL, 2608123425UL, 2 | |
| | | 087201889UL, 1960144614UL, 698076610UL, 841433255UL, 477543427UL, 187888270 | |
| | | 9UL, 1505419004UL, 3692794996UL, 3194913138UL, 2616490007UL, 357561212UL, 2 | |
| | | 823143358UL, 2771068186UL, 2512991743UL, 637464579UL, 1209132455UL, 1991737 | |
| | | 212UL, 27937268UL, 2455017713UL, 1749147666UL, 4020226825UL, 2907896812UL, | |
| | | 1673040956UL, 1837614076UL, 1227604028UL, 714577577UL, 3633969112UL, 528340 | |
| | | 088UL, 489077752UL, 216760440UL, 42151250UL, 2886728356UL, 1646215396UL, 41 | |
| | | 12075489UL, 3514761468UL, 4101921371UL, 3507686008UL, 2095821304UL, 4286077 | |
| | | 681UL, 198050301UL, 1858712743UL, 1463806912UL, 277300160UL, 3981929169UL, | |
| | | 2944140287UL, 2286578015UL, 4137888640UL, 1178044288UL, 2026367394UL, 32699 | |
| | | 86302UL, 3028402878UL, 1276820224UL, 2234191616UL, 294584132UL, 3086749695U | |
| | | L, 3324400975UL, 4274031104UL, 136978944UL, 543687304UL, 4008517630UL, 8363 | |
| | | 70334UL, 2978609152UL, 3275253760UL, 3578325264UL, 3221487612UL, 2671154748 | |
| | | UL, 2296777728UL, 3942394880UL, 2131029536UL, 2165829624UL, 697661816UL, 10 | |
| | | 86645248UL, 3265045504UL, 4259639360UL, 3423548400UL, 2416417776UL, 2295121 | |
| | | 920UL, 3943712768UL, 41950336UL, 3423478496UL, 2885608160UL, 3290486993UL, | |
| | | 2293593009UL, 4179285363UL, 1939838472UL, 1412147404UL, 3718742914UL, 71571 | |
| | | 4152UL, 3838244595UL, 2345662040UL, 2845657161UL, 3251034248UL, 3758048260U | |
| | | L, 138737288UL, 4075331504UL, 31014156UL, 2257801369UL, 370671650UL, 150339 | |
| | | 2345UL, 219309525UL, 246620060UL, 1375177854UL, 2219162331UL, 1759481152UL, | |
| | | 3236558869UL, 297169276UL, 2981812236UL, 135243402UL, 3898070400UL, 253166 | |
| | | 3658UL, 499723753UL, 1103465850UL, 3142293713UL, 2610231010UL, 2433358422UL | |
| | | , 1368961148UL, 2570001060UL, 486949791UL, 3903474593UL, 4172043647UL, 3379 | |
| | | 360748UL, 1922171925UL, 3172880550UL, 1267864331UL, 189423917UL, 1927379456 | |
| | | UL, 1359812359UL, 191463910UL, 3452816347UL, 2551113616UL, 1576396048UL, 25 | |
| | | 18549525UL, 4411574UL, 1222520631UL, 4292139924UL, 3555849728UL, 949028615U | |
| | | L, 3073348038UL, 3260130211UL, 3478094562UL, 1285558544UL, 4113039486UL, 30 | |
| | | 98363790UL, 3039362306UL, 4175656994UL, 1731832828UL, 1827471372UL, 4681595 | |
| | | 32UL, 3236754932UL, 1145506936UL, 1008905193UL, 1626341859UL, 1092964081UL, | |
| | | 1358079399UL, 827886816UL, 1722154800UL, 1069547583UL, 53128947UL, 1748663 | |
| | | 943UL, 249481170UL, 2608123425UL, 3162506114UL, 1960144614UL, 698076610UL, | |
| | | 841433255UL, 477543427UL, 3641706484UL, 1505419004UL, 3692794996UL, 3194913 | |
| | | 138UL, 2616490007UL, 3623882586UL, 2823143358UL, 2771068186UL, 2512991743UL | |
| | | , 637464579UL, 16785012UL, 1991737212UL, 27937268UL, 2455017713UL, 17491476 | |
| | | 66UL, 2348825660UL, 2907896812UL, 1673040956UL, 1837614076UL, 1227604028UL, | |
| | | 2579527800UL, 3633969112UL, 528340088UL, 489077752UL, 216760440UL, 3628134 | |
| | | 628UL, 2886728356UL, 1646215396UL, 4112075489UL, 3514761468UL, 1602085368UL | |
| | | , 3507686008UL, 2095821304UL, 4286077681UL, 198050301UL, 2501362624UL, 1463 | |
| | | 806912UL, 277300160UL, 3981929169UL, 2944140287UL, 4112467840UL, 4137888640 | |
| | | UL, 1178044288UL, 2026367394UL, 3269986302UL, 3356184320UL, 1276820224UL, 2 | |
| | | 234191616UL, 294584132UL, 3086749695UL, 366387712UL, 4274031104UL, 13697894 | |
| | | 4UL, 543687304UL, 4008517630UL, 1006135296UL, 2978609152UL, 3275253760UL, 3 | |
| | | 578325264UL, 3221487612UL, 3104844800UL, 2296777728UL, 3942394880UL, 213102 | |
| | | 9536UL, 2165829624UL, 1874371584UL, 1086645248UL, 3265045504UL, 4259639360U | |
| | | L, 3423548400UL, 2975352832UL, 2295121920UL, 3943712768UL, 41950336UL, 3423 | |
| | | 478496UL, 989898496UL, 3410688577UL, 2331788830UL, 3546482013UL, 813828841U | |
| | | L, 1865093068UL, 3265457506UL, 3795669738UL, 2119696024UL, 4285651426UL, 33 | |
| | | 33834629UL, 3451487261UL, 2090324595UL, 1816963648UL, 932961512UL, 24707610 | |
| | | 29UL, 3401764108UL, 3421619354UL, 4199624502UL, 589386372UL, 879396240UL, 3 | |
| | | 372470254UL, 2693109296UL, 2424215996UL, 38442268UL, 1882087724UL, 17139760 | |
| | | 0UL, 2024561281UL, 183095586UL, 3282207272UL, 3402177296UL, 1859195498UL, 4 | |
| | | 13109947UL, 2839537944UL, 1632143648UL, 3742715856UL, 388696500UL, 17487037 | |
| | | 33UL, 3563198567UL, 3826785440UL, 2896086528UL, 3989037829UL, 1478787788UL, | |
| | | 1390277813UL, 2123320736UL, 3416516800UL, 2056564203UL, 2584895011UL, 1605 | |
| | | 192736UL, 2475623616UL, 3856499712UL, 3439657984UL, 708088129UL, 1501395566 | |
| | | UL, 1302184960UL, 1360092352UL, 1645630430UL, 1425230387UL, 3369488824UL, 2 | |
| | | 979863936UL, 869212432UL, 150548847UL, 1097557362UL, 655939640UL, 316553344 | |
| | | UL, 3761918508UL, 3958338094UL, 141744600UL, 1412214640UL, 1859689984UL, 32 | |
| | | 00680981UL, 3883058679UL, 999801880UL, 3946079738UL, 1876072704UL, 19438184 | |
| | | 9UL, 2177533995UL, 1584707624UL, 3053768410UL, 2593051904UL, 3458076673UL, | |
| | | 4047442835UL, 3545972808UL, 3441793178UL, 194975744UL, 1731731470UL, 416875 | |
| | | 5162UL, 2628944732UL, 2125675784UL, 3119906816UL, 960774145UL, 2646626078UL | |
| | | , 2152793157UL, 3049156634UL, 672464896UL, 3046932493UL, 3700727536UL, 2152 | |
| | | 335477UL, 575986696UL, 671940608UL, 2208366608UL, 1454456125UL, 937760016UL | |
| | | , 4103979069UL, 2737668096UL, 1179779104UL, 1030912634UL, 1041902112UL, 203 | |
| | | 2909434UL, 2274230272UL, 2089025605UL, 3050632421UL, 2428784965UL, 14065814 | |
| | | 9UL, 4254138368UL, 1745354889UL, 711584249UL, 2746523017UL, 2551006457UL, 1 | |
| | | 100808192UL, 1494221073UL, 3422999489UL, 2696954129UL, 976716737UL, 2653421 | |
| | | 568UL, 3806331426UL, 3690047362UL, 1481392674UL, 3817015170UL, 2353004544UL | |
| | | , 286262340UL, 2300534532UL, 4206449732UL, 15339268UL, 2894069760UL, 488376 | |
| | | 456UL, 1489927688UL, 1196583048UL, 652746248UL, 2214592512UL, 69904UL, 1006 | |
| | | 205200UL, 2322628880UL, 1229515024UL, 2617245696UL, 3423527456UL, 196495312 | |
| | | 0UL, 4260938272UL, 386199072UL, 1744830464UL, 1342444608UL, 1069330496UL, 2 | |
| | | 138592320UL, 3185897536UL, 1073741824UL, 1342493824UL, 3780942976UL, 177106 | |
| | | 6496UL, 2189433984UL, 2147483648UL, }, {1804684571UL, 2106089606UL, 1533056 | |
| | | 158UL, 2870216110UL, 3618155659UL, 3789871366UL, 4246691682UL, 3667072763UL | |
| | | , 1212241769UL, 3152390668UL, 2973497449UL, 2958641966UL, 2088805328UL, 717 | |
| | | 518631UL, 2401090860UL, 3606967204UL, 952637656UL, 59827581UL, 1291486682UL | |
| | | , 1499453515UL, 2053994857UL, 563998083UL, 4094000396UL, 1163546899UL, 1003 | |
| | | 843565UL, 654565639UL, 1070907026UL, 4217851863UL, 426034251UL, 1721352737U | |
| | | L, 278404469UL, 3899800390UL, 1063362170UL, 1162348262UL, 3153545093UL, 324 | |
| | | 9996223UL, 186674553UL, 2616406148UL, 3137968354UL, 1282784965UL, 149506805 | |
| | | 8UL, 3033760361UL, 2278144523UL, 3192245769UL, 719586342UL, 2602548287UL, 3 | |
| | | 386583150UL, 355354345UL, 3252815848UL, 2178056037UL, 2283016801UL, 3005955 | |
| | | 037UL, 3340254490UL, 802791670UL, 251122316UL, 3705188626UL, 1252262272UL, | |
| | | 3989036796UL, 3527490452UL, 2047131255UL, 1447170583UL, 3373930285UL, 28950 | |
| | | 37457UL, 209341805UL, 1820357643UL, 3712392731UL, 685796521UL, 1322920440UL | |
| | | , 814388470UL, 1357857147UL, 434430265UL, 2650681935UL, 1371566728UL, 58783 | |
| | | 716UL, 2273435933UL, 3498513198UL, 792571900UL, 1447808772UL, 3513385860UL, | |
| | | 99175889UL, 1105434360UL, 1484146625UL, 3327194068UL, 242672513UL, 3552105 | |
| | | 593UL, 1425844616UL, 2871928454UL, 1124633561UL, 607610433UL, 2130018608UL, | |
| | | 1610235673UL, 2844230432UL, 2748082340UL, 994392866UL, 450823250UL, 291253 | |
| | | 5126UL, 2574390988UL, 3974009252UL, 78696582UL, 649682891UL, 3980917176UL, | |
| | | 3221419689UL, 960695436UL, 729221508UL, 358358845UL, 3392407691UL, 47271100 | |
| | | 5UL, 295914899UL, 3005191796UL, 3078521977UL, 3370011868UL, 509135340UL, 19 | |
| | | 65939519UL, 2086465877UL, 2457949822UL, 1324152522UL, 762289386UL, 36186939 | |
| | | 97UL, 233730715UL, 2873984650UL, 31168606UL, 3367142977UL, 2851851305UL, 32 | |
| | | 51660053UL, 4209768406UL, 3298190175UL, 901235185UL, 1564391510UL, 23526865 | |
| | | 27UL, 1008150482UL, 578573310UL, 3462447127UL, 2482873876UL, 1790221257UL, | |
| | | 2255375608UL, 2335345651UL, 1381450613UL, 2866805101UL, 1495073163UL, 51990 | |
| | | 5259UL, 3184556473UL, 1076378339UL, 2692926127UL, 970097715UL, 4013407916UL | |
| | | , 4014350363UL, 2476927059UL, 1989070516UL, 2640060069UL, 1987784589UL, 188 | |
| | | 0989003UL, 3861138803UL, 451743296UL, 1987067871UL, 1975657871UL, 339781688 | |
| | | 2UL, 2309900530UL, 4108425851UL, 4063867233UL, 3319482186UL, 2621772886UL, | |
| | | 1804684571UL, 2106089606UL, 1533056158UL, 2870216110UL, 611557097UL, 378987 | |
| | | 1366UL, 4246691682UL, 3667072763UL, 1212241769UL, 3389551988UL, 2973497449U | |
| | | L, 2958641966UL, 2088805328UL, 717518631UL, 2460955430UL, 3606967204UL, 952 | |
| | | 637656UL, 59827581UL, 1291486682UL, 3531087304UL, 2053994857UL, 563998083UL | |
| | | , 4094000396UL, 1163546899UL, 1242934125UL, 654565639UL, 1070907026UL, 4217 | |
| | | 851863UL, 426034251UL, 3034416129UL, 278404469UL, 3899800390UL, 1063362170U | |
| | | L, 1162348262UL, 4258714417UL, 3249996223UL, 186674553UL, 2616406148UL, 313 | |
| | | 7968354UL, 639885806UL, 1495068058UL, 3033760361UL, 2278144523UL, 319224576 | |
| | | 9UL, 4159910300UL, 2602548287UL, 3386583150UL, 355354345UL, 3252815848UL, 1 | |
| | | 555885880UL, 2283016801UL, 3005955037UL, 3340254490UL, 802791670UL, 2948774 | |
| | | 612UL, 3705188626UL, 1252262272UL, 3989036796UL, 3527490452UL, 2107826711UL | |
| | | , 1447170583UL, 3373930285UL, 2895037457UL, 209341805UL, 3763367196UL, 3712 | |
| | | 392731UL, 685796521UL, 1322920440UL, 814388470UL, 1986168339UL, 434430265UL | |
| | | , 2650681935UL, 1371566728UL, 58783716UL, 1423189187UL, 3498513198UL, 79257 | |
| | | 1900UL, 1447808772UL, 3513385860UL, 315969823UL, 1105434360UL, 1484146625UL | |
| | | , 3327194068UL, 242672513UL, 3336228275UL, 1425844616UL, 2871928454UL, 1124 | |
| | | 633561UL, 607610433UL, 1762052458UL, 1610235673UL, 2844230432UL, 2748082340 | |
| | | UL, 994392866UL, 3771702243UL, 2912535126UL, 2574390988UL, 3974009252UL, 78 | |
| | | 696582UL, 1626628844UL, 3980917176UL, 3221419689UL, 960695436UL, 729221508U | |
| | | L, 382092233UL, 3392407691UL, 472711005UL, 295914899UL, 3005191796UL, 51429 | |
| | | 7204UL, 3370011868UL, 509135340UL, 1965939519UL, 2086465877UL, 3975975091UL | |
| | | , 1324152522UL, 762289386UL, 3618693997UL, 233730715UL, 455322516UL, 311686 | |
| | | 06UL, 3367142977UL, 2851851305UL, 3251660053UL, 3952189603UL, 3298190175UL, | |
| | | 901235185UL, 1564391510UL, 2352686527UL, 826181452UL, 578573310UL, 3462447 | |
| | | 127UL, 2482873876UL, 1790221257UL, 1529242773UL, 2335345651UL, 1381450613UL | |
| | | , 2866805101UL, 1495073163UL, 877718651UL, 3184556473UL, 1076378339UL, 2692 | |
| | | 926127UL, 970097715UL, 299344245UL, 4014350363UL, 2476927059UL, 1989070516U | |
| | | L, 2640060069UL, 3844531327UL, 1880989003UL, 3861138803UL, 451743296UL, 198 | |
| | | 7067871UL, 3272848161UL, 3397816882UL, 2309900530UL, 4108425851UL, 40638672 | |
| | | 33UL, 834288064UL, 2621772886UL, 1804684571UL, 2106089606UL, 1533056158UL, | |
| | | 304865970UL, 611557097UL, 3789871366UL, 4246691682UL, 3667072763UL, 2728206 | |
| | | 193UL, 3389551988UL, 2973497449UL, 2958641966UL, 2088805328UL, 3895037582UL | |
| | | , 2460955430UL, 3606967204UL, 952637656UL, 59827581UL, 2349212526UL, 353108 | |
| | | 7304UL, 2053994857UL, 563998083UL, 4094000396UL, 4028900485UL, 1242934125UL | |
| | | , 654565639UL, 1070907026UL, 4217851863UL, 1663452176UL, 3034416129UL, 2784 | |
| | | 04469UL, 3899800390UL, 1063362170UL, 2721441405UL, 4258714417UL, 3249996223 | |
| | | UL, 186674553UL, 2616406148UL, 4228837490UL, 639885806UL, 1495068058UL, 303 | |
| | | 3760361UL, 2278144523UL, 2820661772UL, 4159910300UL, 2602548287UL, 33865831 | |
| | | 50UL, 355354345UL, 1815256314UL, 1555885880UL, 2283016801UL, 3005955037UL, | |
| | | 3340254490UL, 2166514144UL, 2948774612UL, 3705188626UL, 1252262272UL, 39890 | |
| | | 36796UL, 751187322UL, 2107826711UL, 1447170583UL, 3373930285UL, 2895037457U | |
| | | L, 2809311944UL, 3763367196UL, 3712392731UL, 685796521UL, 1322920440UL, 936 | |
| | | 300677UL, 1986168339UL, 434430265UL, 2650681935UL, 1371566728UL, 1308015359 | |
| | | UL, 1423189187UL, 3498513198UL, 792571900UL, 1447808772UL, 3065349526UL, 31 | |
| | | 5969823UL, 1105434360UL, 1484146625UL, 3327194068UL, 1038676789UL, 33362282 | |
| | | 75UL, 1425844616UL, 2871928454UL, 1124633561UL, 2956422231UL, 1762052458UL, | |
| | | 1610235673UL, 2844230432UL, 2748082340UL, 3603862093UL, 3771702243UL, 2912 | |
| | | 535126UL, 2574390988UL, 3974009252UL, 1691332448UL, 1626628844UL, 398091717 | |
| | | 6UL, 3221419689UL, 960695436UL, 3120142427UL, 382092233UL, 3392407691UL, 47 | |
| | | 2711005UL, 295914899UL, 4101686983UL, 514297204UL, 3370011868UL, 509135340U | |
| | | L, 1965939519UL, 3015736706UL, 3975975091UL, 1324152522UL, 762289386UL, 361 | |
| | | 8693997UL, 2395097989UL, 455322516UL, 31168606UL, 3367142977UL, 2851851305U | |
| | | L, 30511955UL, 3952189603UL, 3298190175UL, 901235185UL, 1564391510UL, 26062 | |
| | | 98633UL, 826181452UL, 578573310UL, 3462447127UL, 2482873876UL, 4159642946UL | |
| | | , 1529242773UL, 2335345651UL, 1381450613UL, 2866805101UL, 1782913669UL, 877 | |
| | | 718651UL, 3184556473UL, 1076378339UL, 2692926127UL, 1730328819UL, 299344245 | |
| | | UL, 4014350363UL, 2476927059UL, 1989070516UL, 1425685614UL, 3844531327UL, 1 | |
| | | 880989003UL, 3861138803UL, 451743296UL, 889237383UL, 3272848161UL, 33978168 | |
| | | 82UL, 2309900530UL, 4108425851UL, 1155723231UL, 834288064UL, 2621772886UL, | |
| | | 1804684571UL, 2106089606UL, 2387009004UL, 304865970UL, 611557097UL, 3789871 | |
| | | 366UL, 4246691682UL, 1405709661UL, 2728206193UL, 3389551988UL, 2973497449UL | |
| | | , 2958641966UL, 3183906006UL, 3895037582UL, 2460955430UL, 3606967204UL, 952 | |
| | | 637656UL, 1345432763UL, 2349212526UL, 3531087304UL, 2053994857UL, 563998083 | |
| | | UL, 3749011414UL, 4028900485UL, 1242934125UL, 654565639UL, 1070907026UL, 10 | |
| | | 72342672UL, 1663452176UL, 3034416129UL, 278404469UL, 3899800390UL, 35666521 | |
| | | 88UL, 2721441405UL, 4258714417UL, 3249996223UL, 186674553UL, 4001263143UL, | |
| | | 4228837490UL, 639885806UL, 1495068058UL, 3033760361UL, 4278332644UL, 282066 | |
| | | 1772UL, 4159910300UL, 2602548287UL, 3386583150UL, 838831089UL, 1815256314UL | |
| | | , 1555885880UL, 2283016801UL, 3005955037UL, 3377397178UL, 2166514144UL, 294 | |
| | | 8774612UL, 3705188626UL, 1252262272UL, 2414422575UL, 751187322UL, 210782671 | |
| | | 1UL, 1447170583UL, 3373930285UL, 1253755033UL, 2809311944UL, 3763367196UL, | |
| | | 3712392731UL, 685796521UL, 3238624475UL, 936300677UL, 1986168339UL, 4344302 | |
| | | 65UL, 2650681935UL, 1642290570UL, 1308015359UL, 1423189187UL, 3498513198UL, | |
| | | 792571900UL, 173318140UL, 3065349526UL, 315969823UL, 1105434360UL, 1484146 | |
| | | 625UL, 4103797777UL, 1038676789UL, 3336228275UL, 1425844616UL, 2871928454UL | |
| | | , 1797745765UL, 2956422231UL, 1762052458UL, 1610235673UL, 2844230432UL, 218 | |
| | | 0656608UL, 3603862093UL, 3771702243UL, 2912535126UL, 2574390988UL, 11830983 | |
| | | 90UL, 1691332448UL, 1626628844UL, 3980917176UL, 3221419689UL, 2645203959UL, | |
| | | 3120142427UL, 382092233UL, 3392407691UL, 472711005UL, 1659659070UL, 410168 | |
| | | 6983UL, 514297204UL, 3370011868UL, 509135340UL, 483888155UL, 3015736706UL, | |
| | | 3975975091UL, 1324152522UL, 762289386UL, 1259948064UL, 2395097989UL, 455322 | |
| | | 516UL, 31168606UL, 3367142977UL, 339990414UL, 30511955UL, 3952189603UL, 329 | |
| | | 8190175UL, 901235185UL, 3097920065UL, 2606298633UL, 826181452UL, 578573310U | |
| | | L, 3462447127UL, 1548039839UL, 4159642946UL, 1529242773UL, 2335345651UL, 13 | |
| | | 81450613UL, 2173079994UL, 1782913669UL, 877718651UL, 3184556473UL, 10763783 | |
| | | 39UL, 1570275057UL, 1730328819UL, 299344245UL, 4014350363UL, 2476927059UL, | |
| | | 1845882881UL, 1425685614UL, 3844531327UL, 1880989003UL, 3861138803UL, 13224 | |
| | | 09081UL, 889237383UL, 3272848161UL, 3397816882UL, 2309900530UL, 3505447982U | |
| | | L, 3430136873UL, 1319796589UL, 4202423979UL, 3184732284UL, 2910356648UL, 25 | |
| | | 34615223UL, 3854465731UL, 768821792UL, 2205052576UL, 1348983754UL, 13002501 | |
| | | 88UL, 2919181738UL, 2520178732UL, 3967243685UL, 2646012002UL, 1784678658UL, | |
| | | 741302051UL, 3464753547UL, 194213376UL, 1482799064UL, 3009673860UL, 680824 | |
| | | 208UL, 741966796UL, 2381283369UL, 3022877171UL, 1619439814UL, 3961433610UL, | |
| | | 1331297670UL, 1100110820UL, 1311672539UL, 1122110615UL, 4056004850UL, 3413 | |
| | | 790176UL, 3148768822UL, 1242592694UL, 2925975727UL, 1879285134UL, 334328879 | |
| | | UL, 1318235222UL, 3140739559UL, 401691770UL, 3604288404UL, 3686496908UL, 77 | |
| | | 0670945UL, 199139043UL, 2092710473UL, 3914528993UL, 700991333UL, 2375775811 | |
| | | UL, 858137308UL, 3490050165UL, 2389078291UL, 1615607459UL, 3027969809UL, 82 | |
| | | 0012549UL, 2085659484UL, 2654485136UL, 2630408646UL, 196481396UL, 111967327 | |
| | | 4UL, 1026209692UL, 726501622UL, 2940737143UL, 3559571163UL, 2288027726UL, 1 | |
| | | 039212708UL, 929664536UL, 1061981465UL, 186058675UL, 3537656152UL, 84417679 | |
| | | 6UL, 2996217992UL, 1545798611UL, 3031020656UL, 2248030435UL, 1665857580UL, | |
| | | 2905758082UL, 1269201312UL, 3031275084UL, 4034872841UL, 983632400UL, 418850 | |
| | | 3190UL, 757119675UL, 2105920865UL, 4281032819UL, 2917801076UL, 3900010013UL | |
| | | , 3910997169UL, 1729751422UL, 562313247UL, 3070846353UL, 2564238664UL, 4050 | |
| | | 540186UL, 4258833501UL, 2270666053UL, 2207128401UL, 2990540001UL, 797768898 | |
| | | UL, 2288390225UL, 3230323685UL, 1974727440UL, 3327301426UL, 289857826UL, 35 | |
| | | 65889868UL, 2791014422UL, 2021097820UL, 3350378271UL, 3673707591UL, 2610067 | |
| | | 927UL, 4255789547UL, 2682856590UL, 12563128UL, 1397542366UL, 237149400UL, 2 | |
| | | 233707508UL, 3875573245UL, 2097374144UL, 175320773UL, 4103445984UL, 4089284 | |
| | | 323UL, 3610168130UL, 3084915964UL, 680145366UL, 2571684685UL, 1132894909UL, | |
| | | 104640024UL, 193765521UL, 2338202907UL, 895271448UL, 11499099UL, 179806641 | |
| | | 7UL, 1297412626UL, 2511347162UL, 3140535007UL, 2129963538UL, 700683199UL, 2 | |
| | | 609700278UL, 2953463279UL, 2290844145UL, 1871316353UL, 3993801787UL, 221941 | |
| | | 3182UL, 2954453701UL, 231283580UL, 1375331115UL, 207723994UL, 1799562537UL, | |
| | | 2056553564UL, 2513609799UL, 3542459627UL, 3173012714UL, 3923404932UL, 2178 | |
| | | 77755UL, 2095124912UL, 192024370UL, 1168134987UL, 1889598668UL, 3014873069U | |
| | | L, 2033573343UL, }, {3465348660UL, 3623545008UL, 3505902593UL, 838034830UL, | |
| | | 1338018789UL, 2595329276UL, 3367746385UL, 3197935201UL, 1439351946UL, 3585 | |
| | | 085571UL, 4165798087UL, 3634792639UL, 2359485974UL, 2772582925UL, 111018620 | |
| | | 3UL, 3771562484UL, 1508694157UL, 1564641206UL, 2801985736UL, 2446107936UL, | |
| | | 3849126897UL, 1842973671UL, 944408104UL, 2624631280UL, 2729080685UL, 373736 | |
| | | 8614UL, 858809173UL, 2289802345UL, 2428186575UL, 3114742765UL, 716011303UL, | |
| | | 3443810690UL, 814132610UL, 517432787UL, 614445393UL, 2930433345UL, 2911780 | |
| | | 98UL, 2117644502UL, 2749446703UL, 311745701UL, 365684723UL, 1705418876UL, 2 | |
| | | 213749318UL, 4011417220UL, 1842575651UL, 988348831UL, 94258998UL, 277115027 | |
| | | 2UL, 498058526UL, 1344827813UL, 2961955291UL, 262703473UL, 1404034822UL, 15 | |
| | | 66595865UL, 2522381203UL, 1706522206UL, 1203054806UL, 1273801539UL, 2070583 | |
| | | 465UL, 3913449936UL, 3231505231UL, 619636751UL, 3746997351UL, 4103027837UL, | |
| | | 1205468203UL, 3355878253UL, 3433356888UL, 107785753UL, 2779092609UL, 18696 | |
| | | 91566UL, 2555219983UL, 903319808UL, 3273374169UL, 2538926990UL, 979533870UL | |
| | | , 1356500860UL, 1661983738UL, 1380761625UL, 2919458459UL, 1041142798UL, 143 | |
| | | 0817627UL, 517007606UL, 1421570516UL, 2371447300UL, 2985632691UL, 368488935 | |
| | | 1UL, 3873926653UL, 788770697UL, 1854750277UL, 209332297UL, 1137299679UL, 84 | |
| | | 8527832UL, 3850486924UL, 4179307312UL, 2764470693UL, 1353191605UL, 41668919 | |
| | | 19UL, 2074703841UL, 3373997532UL, 2013528640UL, 701389744UL, 841917592UL, 2 | |
| | | 065742268UL, 2721848192UL, 2566956680UL, 3122896007UL, 1090761479UL, 921859 | |
| | | 028UL, 4086736376UL, 1837462309UL, 2579826431UL, 2436217134UL, 839037727UL, | |
| | | 1072086642UL, 614518622UL, 3764758228UL, 1501128342UL, 3669108708UL, 16014 | |
| | | 07381UL, 2899014005UL, 3268308948UL, 3337564231UL, 1986911578UL, 3379194930 | |
| | | UL, 1950365753UL, 2098537451UL, 51515980UL, 1176526086UL, 3213391582UL, 105 | |
| | | 9745735UL, 2273586703UL, 376085505UL, 1493749800UL, 3970342143UL, 162092524 | |
| | | 4UL, 2165301314UL, 2332030190UL, 1864098798UL, 276747442UL, 2776569227UL, 2 | |
| | | 992780663UL, 3027279789UL, 1074555384UL, 3481518659UL, 2499703783UL, 661805 | |
| | | 703UL, 3782305562UL, 9186074UL, 2357407210UL, 2355922343UL, 2024733363UL, 4 | |
| | | 85434612UL, 862379913UL, 1029706268UL, 1512726310UL, 3834948354UL, 14358928 | |
| | | 40UL, 3297980694UL, 2831553800UL, 2111416471UL, 711321697UL, 3465348660UL, | |
| | | 3623545008UL, 3505902593UL, 838034830UL, 1553436793UL, 2595329276UL, 336774 | |
| | | 6385UL, 3197935201UL, 1439351946UL, 3198044157UL, 4165798087UL, 3634792639U | |
| | | L, 2359485974UL, 2772582925UL, 836042976UL, 3771562484UL, 1508694157UL, 156 | |
| | | 4641206UL, 2801985736UL, 1190371491UL, 3849126897UL, 1842973671UL, 94440810 | |
| | | 4UL, 2624631280UL, 410746791UL, 3737368614UL, 858809173UL, 2289802345UL, 24 | |
| | | 28186575UL, 1542325976UL, 716011303UL, 3443810690UL, 814132610UL, 517432787 | |
| | | UL, 1649301063UL, 2930433345UL, 291178098UL, 2117644502UL, 2749446703UL, 39 | |
| | | 55511579UL, 365684723UL, 1705418876UL, 2213749318UL, 4011417220UL, 27536328 | |
| | | 62UL, 988348831UL, 94258998UL, 2771150272UL, 498058526UL, 3314106168UL, 296 | |
| | | 1955291UL, 262703473UL, 1404034822UL, 1566595865UL, 3590367097UL, 170652220 | |
| | | 6UL, 1203054806UL, 1273801539UL, 2070583465UL, 2340683261UL, 3231505231UL, | |
| | | 619636751UL, 3746997351UL, 4103027837UL, 2785398766UL, 3355878253UL, 343335 | |
| | | 6888UL, 107785753UL, 2779092609UL, 1608451840UL, 2555219983UL, 903319808UL, | |
| | | 3273374169UL, 2538926990UL, 645164419UL, 1356500860UL, 1661983738UL, 13807 | |
| | | 61625UL, 2919458459UL, 2260224548UL, 1430817627UL, 517007606UL, 1421570516U | |
| | | L, 2371447300UL, 1636004496UL, 3684889351UL, 3873926653UL, 788770697UL, 185 | |
| | | 4750277UL, 1345251011UL, 1137299679UL, 848527832UL, 3850486924UL, 417930731 | |
| | | 2UL, 3576574608UL, 1353191605UL, 4166891919UL, 2074703841UL, 3373997532UL, | |
| | | 183447754UL, 701389744UL, 841917592UL, 2065742268UL, 2721848192UL, 21092898 | |
| | | 91UL, 3122896007UL, 1090761479UL, 921859028UL, 4086736376UL, 2212730874UL, | |
| | | 2579826431UL, 2436217134UL, 839037727UL, 1072086642UL, 55934784UL, 37647582 | |
| | | 28UL, 1501128342UL, 3669108708UL, 1601407381UL, 516550987UL, 3268308948UL, | |
| | | 3337564231UL, 1986911578UL, 3379194930UL, 3973484473UL, 2098537451UL, 51515 | |
| | | 980UL, 1176526086UL, 3213391582UL, 4251661633UL, 2273586703UL, 376085505UL, | |
| | | 1493749800UL, 3970342143UL, 3190791788UL, 2165301314UL, 2332030190UL, 1864 | |
| | | 098798UL, 276747442UL, 2991976613UL, 2992780663UL, 3027279789UL, 1074555384 | |
| | | UL, 3481518659UL, 1399789494UL, 661805703UL, 3782305562UL, 9186074UL, 23574 | |
| | | 07210UL, 1942736967UL, 2024733363UL, 485434612UL, 862379913UL, 1029706268UL | |
| | | , 4122704494UL, 3834948354UL, 1435892840UL, 3297980694UL, 2831553800UL, 121 | |
| | | 0092654UL, 711321697UL, 3465348660UL, 3623545008UL, 3505902593UL, 344323119 | |
| | | 8UL, 1553436793UL, 2595329276UL, 3367746385UL, 3197935201UL, 1304974987UL, | |
| | | 3198044157UL, 4165798087UL, 3634792639UL, 2359485974UL, 3518323362UL, 83604 | |
| | | 2976UL, 3771562484UL, 1508694157UL, 1564641206UL, 3577633375UL, 1190371491U | |
| | | L, 3849126897UL, 1842973671UL, 944408104UL, 1854555112UL, 410746791UL, 3737 | |
| | | 368614UL, 858809173UL, 2289802345UL, 3622671731UL, 1542325976UL, 716011303U | |
| | | L, 3443810690UL, 814132610UL, 296197011UL, 1649301063UL, 2930433345UL, 2911 | |
| | | 78098UL, 2117644502UL, 1056271538UL, 3955511579UL, 365684723UL, 1705418876U | |
| | | L, 2213749318UL, 1258535671UL, 2753632862UL, 988348831UL, 94258998UL, 27711 | |
| | | 50272UL, 3669902097UL, 3314106168UL, 2961955291UL, 262703473UL, 1404034822U | |
| | | L, 1654433938UL, 3590367097UL, 1706522206UL, 1203054806UL, 1273801539UL, 24 | |
| | | 48138887UL, 2340683261UL, 3231505231UL, 619636751UL, 3746997351UL, 14540883 | |
| | | 94UL, 2785398766UL, 3355878253UL, 3433356888UL, 107785753UL, 689323470UL, 1 | |
| | | 608451840UL, 2555219983UL, 903319808UL, 3273374169UL, 1603842392UL, 6451644 | |
| | | 19UL, 1356500860UL, 1661983738UL, 1380761625UL, 2814639423UL, 2260224548UL, | |
| | | 1430817627UL, 517007606UL, 1421570516UL, 1938805701UL, 1636004496UL, 36848 | |
| | | 89351UL, 3873926653UL, 788770697UL, 4238900666UL, 1345251011UL, 1137299679U | |
| | | L, 848527832UL, 3850486924UL, 108793827UL, 3576574608UL, 1353191605UL, 4166 | |
| | | 891919UL, 2074703841UL, 3780897861UL, 183447754UL, 701389744UL, 841917592UL | |
| | | , 2065742268UL, 3036602746UL, 2109289891UL, 3122896007UL, 1090761479UL, 921 | |
| | | 859028UL, 3499985398UL, 2212730874UL, 2579826431UL, 2436217134UL, 839037727 | |
| | | UL, 3520354700UL, 55934784UL, 3764758228UL, 1501128342UL, 3669108708UL, 160 | |
| | | 1010847UL, 516550987UL, 3268308948UL, 3337564231UL, 1986911578UL, 270424178 | |
| | | 1UL, 3973484473UL, 2098537451UL, 51515980UL, 1176526086UL, 3602010532UL, 42 | |
| | | 51661633UL, 2273586703UL, 376085505UL, 1493749800UL, 2922957328UL, 31907917 | |
| | | 88UL, 2165301314UL, 2332030190UL, 1864098798UL, 1649666443UL, 2991976613UL, | |
| | | 2992780663UL, 3027279789UL, 1074555384UL, 2848531519UL, 1399789494UL, 6618 | |
| | | 05703UL, 3782305562UL, 9186074UL, 320781315UL, 1942736967UL, 2024733363UL, | |
| | | 485434612UL, 862379913UL, 3598892066UL, 4122704494UL, 3834948354UL, 1435892 | |
| | | 840UL, 3297980694UL, 545184652UL, 1210092654UL, 711321697UL, 3465348660UL, | |
| | | 3623545008UL, 1173753045UL, 3443231198UL, 1553436793UL, 2595329276UL, 33677 | |
| | | 46385UL, 2444634476UL, 1304974987UL, 3198044157UL, 4165798087UL, 3634792639 | |
| | | UL, 1837035806UL, 3518323362UL, 836042976UL, 3771562484UL, 1508694157UL, 28 | |
| | | 99021294UL, 3577633375UL, 1190371491UL, 3849126897UL, 1842973671UL, 1614215 | |
| | | 215UL, 1854555112UL, 410746791UL, 3737368614UL, 858809173UL, 525745365UL, 3 | |
| | | 622671731UL, 1542325976UL, 716011303UL, 3443810690UL, 566299749UL, 29619701 | |
| | | 1UL, 1649301063UL, 2930433345UL, 291178098UL, 1987532525UL, 1056271538UL, 3 | |
| | | 955511579UL, 365684723UL, 1705418876UL, 2321222760UL, 1258535671UL, 2753632 | |
| | | 862UL, 988348831UL, 94258998UL, 2986060366UL, 3669902097UL, 3314106168UL, 2 | |
| | | 961955291UL, 262703473UL, 604452796UL, 1654433938UL, 3590367097UL, 17065222 | |
| | | 06UL, 1203054806UL, 1894894069UL, 2448138887UL, 2340683261UL, 3231505231UL, | |
| | | 619636751UL, 6680729UL, 1454088394UL, 2785398766UL, 3355878253UL, 34333568 | |
| | | 88UL, 2025591660UL, 689323470UL, 1608451840UL, 2555219983UL, 903319808UL, 3 | |
| | | 430384385UL, 1603842392UL, 645164419UL, 1356500860UL, 1661983738UL, 2108736 | |
| | | 152UL, 2814639423UL, 2260224548UL, 1430817627UL, 517007606UL, 2973658959UL, | |
| | | 1938805701UL, 1636004496UL, 3684889351UL, 3873926653UL, 2283691941UL, 4238 | |
| | | 900666UL, 1345251011UL, 1137299679UL, 848527832UL, 45551112UL, 108793827UL, | |
| | | 3576574608UL, 1353191605UL, 4166891919UL, 3776615962UL, 3780897861UL, 1834 | |
| | | 47754UL, 701389744UL, 841917592UL, 3830639316UL, 3036602746UL, 2109289891UL | |
| | | , 3122896007UL, 1090761479UL, 1931255897UL, 3499985398UL, 2212730874UL, 257 | |
| | | 9826431UL, 2436217134UL, 3272166055UL, 3520354700UL, 55934784UL, 3764758228 | |
| | | UL, 1501128342UL, 1567864246UL, 1601010847UL, 516550987UL, 3268308948UL, 33 | |
| | | 37564231UL, 3918802424UL, 2704241781UL, 3973484473UL, 2098537451UL, 5151598 | |
| | | 0UL, 3551394489UL, 3602010532UL, 4251661633UL, 2273586703UL, 376085505UL, 8 | |
| | | 85459498UL, 2922957328UL, 3190791788UL, 2165301314UL, 2332030190UL, 3197056 | |
| | | 515UL, 1649666443UL, 2991976613UL, 2992780663UL, 3027279789UL, 2385348906UL | |
| | | , 2848531519UL, 1399789494UL, 661805703UL, 3782305562UL, 2163075465UL, 3207 | |
| | | 81315UL, 1942736967UL, 2024733363UL, 485434612UL, 2680597981UL, 3598892066U | |
| | | L, 4122704494UL, 3834948354UL, 1435892840UL, 2499644163UL, 2704575422UL, 25 | |
| | | 79557838UL, 673530532UL, 493730767UL, 1124557747UL, 1908629439UL, 282194950 | |
| | | 4UL, 1743112513UL, 2849457841UL, 2344409314UL, 3479159262UL, 4260973770UL, | |
| | | 2991970754UL, 3812641863UL, 2229319917UL, 2466968521UL, 1766353737UL, 32165 | |
| | | 91612UL, 2113272648UL, 364370737UL, 1893001758UL, 2608875275UL, 4224057183U | |
| | | L, 3546705413UL, 1999778009UL, 348872225UL, 2470564216UL, 1417878284UL, 270 | |
| | | 9790112UL, 3579129936UL, 2137971615UL, 4046639861UL, 2841156930UL, 39154473 | |
| | | 7UL, 2056567354UL, 737657378UL, 3877904725UL, 578930752UL, 1759172471UL, 33 | |
| | | 83278785UL, 1047197514UL, 649468151UL, 3452867243UL, 1792089520UL, 63936215 | |
| | | UL, 3909143729UL, 3753489875UL, 734314122UL, 2490530916UL, 3043874586UL, 15 | |
| | | 04812057UL, 59001199UL, 2493748676UL, 2552438622UL, 1889694845UL, 371539786 | |
| | | 0UL, 2817245010UL, 3841049206UL, 816106718UL, 2176130406UL, 640254735UL, 12 | |
| | | 376903UL, 3000264936UL, 3304116079UL, 1620334094UL, 2109391765UL, 134821095 | |
| | | 1UL, 2237645681UL, 1207768272UL, 1562894669UL, 2156631655UL, 1387193235UL, | |
| | | 3154858817UL, 633510901UL, 2312190757UL, 402878244UL, 2501565021UL, 2984409 | |
| | | 334UL, 4167491216UL, 3614267292UL, 3078552271UL, 971722322UL, 3065543880UL, | |
| | | 2307584190UL, 491480322UL, 2068673112UL, 1929780632UL, 178549964UL, 983979 | |
| | | 983UL, 2769314886UL, 4214442042UL, 2977609682UL, 25450683UL, 3075212658UL, | |
| | | 1571149568UL, 3531670561UL, 42782504UL, 425601306UL, 428715214UL, 497250251 | |
| | | UL, 693520802UL, 166426814UL, 1786382125UL, 2712003995UL, 3610802197UL, 207 | |
| | | 6490757UL, 404822980UL, 3953184772UL, 1655231947UL, 3594351577UL, 306823227 | |
| | | 4UL, 3771730346UL, 4110519574UL, 3534704897UL, 2375277865UL, 3597780202UL, | |
| | | 3472676002UL, 1350276449UL, 3218248239UL, 3589255283UL, 3253132633UL, 17698 | |
| | | 85529UL, 3792812294UL, 120332643UL, 1219374788UL, 3608889019UL, 2386099811U | |
| | | L, 858495304UL, 1284785543UL, 331370962UL, 2259419662UL, 2519864134UL, 3194 | |
| | | 739432UL, 2669074511UL, 2565559140UL, 3378072004UL, 2647801475UL, 265068954 | |
| | | UL, 1464416963UL, 1232787612UL, 4160089759UL, 2510685972UL, 670300081UL, 25 | |
| | | 09357766UL, 1981891975UL, 4161588397UL, 1371924626UL, 44760868UL, 634955171 | |
| | | UL, 1187096933UL, 3324788972UL, 3576888559UL, 2801347752UL, 3730298395UL, 1 | |
| | | 702170762UL, 4206083415UL, 741409141UL, 3649731355UL, 1025429529UL, }, {914 | |
| | | 44490UL, 628576944UL, 4069219862UL, 2253058925UL, 492354082UL, 1191182242UL | |
| | | , 1565180119UL, 2257613723UL, 456055162UL, 605712223UL, 953365104UL, 310463 | |
| | | 8527UL, 1133984729UL, 2662828416UL, 2134948274UL, 1921384447UL, 843719355UL | |
| | | , 588432962UL, 1734575434UL, 2924140067UL, 483396548UL, 3848838894UL, 31554 | |
| | | 76556UL, 1760928304UL, 4168059840UL, 3279827269UL, 2644461735UL, 4168565656 | |
| | | UL, 3951563569UL, 1276805504UL, 1708974143UL, 1878547888UL, 3465220024UL, 3 | |
| | | 062086782UL, 2801401651UL, 1510428126UL, 716404149UL, 1646021208UL, 3534932 | |
| | | 385UL, 1186585561UL, 651997355UL, 282914223UL, 352224857UL, 3764407517UL, 1 | |
| | | 059868753UL, 1971798134UL, 978904005UL, 976413661UL, 4039544152UL, 49898969 | |
| | | 3UL, 2565125471UL, 2782642813UL, 3537961025UL, 1194967362UL, 169217024UL, 3 | |
| | | 491609UL, 1319592872UL, 1630206561UL, 2497130840UL, 1685008996UL, 282894401 | |
| | | 6UL, 3301346775UL, 2893072371UL, 2606559798UL, 4026138031UL, 2664450619UL, | |
| | | 691091062UL, 1079640113UL, 1417637732UL, 4081852209UL, 2197910648UL, 231038 | |
| | | 2370UL, 1000957047UL, 959936499UL, 2844551811UL, 2272766890UL, 31122394UL, | |
| | | 2742925483UL, 1121884686UL, 57929089UL, 2468361281UL, 2982007782UL, 2371576 | |
| | | 893UL, 177782593UL, 3603584577UL, 672057044UL, 2108452841UL, 1671338057UL, | |
| | | 3386908223UL, 1243029765UL, 805157552UL, 1271858417UL, 1621249501UL, 180485 | |
| | | 1492UL, 1321010403UL, 751773221UL, 1517221627UL, 822709871UL, 104533154UL, | |
| | | 3578182264UL, 640541709UL, 421086624UL, 4233576392UL, 3729339369UL, 1974606 | |
| | | 44UL, 773140636UL, 2158026018UL, 1756785611UL, 4011575991UL, 3569445500UL, | |
| | | 736117181UL, 2456162322UL, 1168189787UL, 3651312675UL, 1070291988UL, 268231 | |
| | | 205UL, 541474497UL, 3316168972UL, 3546990856UL, 830417208UL, 725960194UL, 2 | |
| | | 044207227UL, 3188997938UL, 2383298579UL, 3350316374UL, 3575011225UL, 155311 | |
| | | 1865UL, 1285013027UL, 749371711UL, 766611716UL, 598195098UL, 2139882719UL, | |
| | | 2062405428UL, 3634702446UL, 3015263295UL, 223311969UL, 2622859522UL, 388849 | |
| | | 2701UL, 2955257225UL, 582625650UL, 3563756446UL, 2886083960UL, 1907546514UL | |
| | | , 454650902UL, 3287277541UL, 625828138UL, 2991888140UL, 1935326370UL, 40311 | |
| | | 52256UL, 702881509UL, 1427632724UL, 1345475301UL, 2577560804UL, 2858595147U | |
| | | L, 2533191188UL, 185662179UL, 536505093UL, 3747894147UL, 111551030UL, 37037 | |
| | | 3207UL, 2293908590UL, 91444490UL, 628576944UL, 4069219862UL, 2253058925UL, | |
| | | 1671484924UL, 1191182242UL, 1565180119UL, 2257613723UL, 456055162UL, 341109 | |
| | | 4744UL, 953365104UL, 3104638527UL, 1133984729UL, 2662828416UL, 2000630022UL | |
| | | , 1921384447UL, 843719355UL, 588432962UL, 1734575434UL, 3293926122UL, 48339 | |
| | | 6548UL, 3848838894UL, 3155476556UL, 1760928304UL, 146876953UL, 3279827269UL | |
| | | , 2644461735UL, 4168565656UL, 3951563569UL, 3976156700UL, 1708974143UL, 187 | |
| | | 8547888UL, 3465220024UL, 3062086782UL, 1999154400UL, 1510428126UL, 71640414 | |
| | | 9UL, 1646021208UL, 3534932385UL, 2479551429UL, 651997355UL, 282914223UL, 35 | |
| | | 2224857UL, 3764407517UL, 1275979651UL, 1971798134UL, 978904005UL, 976413661 | |
| | | UL, 4039544152UL, 300654823UL, 2565125471UL, 2782642813UL, 3537961025UL, 11 | |
| | | 94967362UL, 3123973648UL, 3491609UL, 1319592872UL, 1630206561UL, 2497130840 | |
| | | UL, 1437913158UL, 2828944016UL, 3301346775UL, 2893072371UL, 2606559798UL, 2 | |
| | | 153172585UL, 2664450619UL, 691091062UL, 1079640113UL, 1417637732UL, 1713723 | |
| | | 7UL, 2197910648UL, 2310382370UL, 1000957047UL, 959936499UL, 802137134UL, 22 | |
| | | 72766890UL, 31122394UL, 2742925483UL, 1121884686UL, 3909775167UL, 246836128 | |
| | | 1UL, 2982007782UL, 2371576893UL, 177782593UL, 3319492525UL, 672057044UL, 21 | |
| | | 08452841UL, 1671338057UL, 3386908223UL, 1878151473UL, 805157552UL, 12718584 | |
| | | 17UL, 1621249501UL, 1804851492UL, 3215921223UL, 751773221UL, 1517221627UL, | |
| | | 822709871UL, 104533154UL, 361845001UL, 640541709UL, 421086624UL, 4233576392 | |
| | | UL, 3729339369UL, 2655936801UL, 773140636UL, 2158026018UL, 1756785611UL, 40 | |
| | | 11575991UL, 587202971UL, 736117181UL, 2456162322UL, 1168189787UL, 365131267 | |
| | | 5UL, 2517883370UL, 268231205UL, 541474497UL, 3316168972UL, 3546990856UL, 20 | |
| | | 37251305UL, 725960194UL, 2044207227UL, 3188997938UL, 2383298579UL, 26650085 | |
| | | 87UL, 3575011225UL, 1553111865UL, 1285013027UL, 749371711UL, 2163964019UL, | |
| | | 598195098UL, 2139882719UL, 2062405428UL, 3634702446UL, 2788202059UL, 223311 | |
| | | 969UL, 2622859522UL, 3888492701UL, 2955257225UL, 740986174UL, 3563756446UL, | |
| | | 2886083960UL, 1907546514UL, 454650902UL, 2426323587UL, 625828138UL, 299188 | |
| | | 8140UL, 1935326370UL, 4031152256UL, 1831149435UL, 1427632724UL, 1345475301U | |
| | | L, 2577560804UL, 2858595147UL, 3977153945UL, 185662179UL, 536505093UL, 3747 | |
| | | 894147UL, 111551030UL, 4131587422UL, 2293908590UL, 91444490UL, 628576944UL, | |
| | | 4069219862UL, 2408189350UL, 1671484924UL, 1191182242UL, 1565180119UL, 2257 | |
| | | 613723UL, 1338069254UL, 3411094744UL, 953365104UL, 3104638527UL, 1133984729 | |
| | | UL, 631497759UL, 2000630022UL, 1921384447UL, 843719355UL, 588432962UL, 3280 | |
| | | 318959UL, 3293926122UL, 483396548UL, 3848838894UL, 3155476556UL, 1777918163 | |
| | | UL, 146876953UL, 3279827269UL, 2644461735UL, 4168565656UL, 2786264663UL, 39 | |
| | | 76156700UL, 1708974143UL, 1878547888UL, 3465220024UL, 2793923820UL, 1999154 | |
| | | 400UL, 1510428126UL, 716404149UL, 1646021208UL, 3102243824UL, 2479551429UL, | |
| | | 651997355UL, 282914223UL, 352224857UL, 3767702588UL, 1275979651UL, 1971798 | |
| | | 134UL, 978904005UL, 976413661UL, 1951622548UL, 300654823UL, 2565125471UL, 2 | |
| | | 782642813UL, 3537961025UL, 2186817324UL, 3123973648UL, 3491609UL, 131959287 | |
| | | 2UL, 1630206561UL, 1075424534UL, 1437913158UL, 2828944016UL, 3301346775UL, | |
| | | 2893072371UL, 207992406UL, 2153172585UL, 2664450619UL, 691091062UL, 1079640 | |
| | | 113UL, 3114255216UL, 17137237UL, 2197910648UL, 2310382370UL, 1000957047UL, | |
| | | 2548008553UL, 802137134UL, 2272766890UL, 31122394UL, 2742925483UL, 40694823 | |
| | | 73UL, 3909775167UL, 2468361281UL, 2982007782UL, 2371576893UL, 2807823912UL, | |
| | | 3319492525UL, 672057044UL, 2108452841UL, 1671338057UL, 12831353UL, 1878151 | |
| | | 473UL, 805157552UL, 1271858417UL, 1621249501UL, 461887094UL, 3215921223UL, | |
| | | 751773221UL, 1517221627UL, 822709871UL, 1317394918UL, 361845001UL, 64054170 | |
| | | 9UL, 421086624UL, 4233576392UL, 3385587450UL, 2655936801UL, 773140636UL, 21 | |
| | | 58026018UL, 1756785611UL, 1475601973UL, 587202971UL, 736117181UL, 245616232 | |
| | | 2UL, 1168189787UL, 911455077UL, 2517883370UL, 268231205UL, 541474497UL, 331 | |
| | | 6168972UL, 1500275507UL, 2037251305UL, 725960194UL, 2044207227UL, 318899793 | |
| | | 8UL, 2036633808UL, 2665008587UL, 3575011225UL, 1553111865UL, 1285013027UL, | |
| | | 87868216UL, 2163964019UL, 598195098UL, 2139882719UL, 2062405428UL, 51790730 | |
| | | 1UL, 2788202059UL, 223311969UL, 2622859522UL, 3888492701UL, 3926046234UL, 7 | |
| | | 40986174UL, 3563756446UL, 2886083960UL, 1907546514UL, 1911066215UL, 2426323 | |
| | | 587UL, 625828138UL, 2991888140UL, 1935326370UL, 2031853435UL, 1831149435UL, | |
| | | 1427632724UL, 1345475301UL, 2577560804UL, 3509674153UL, 3977153945UL, 1856 | |
| | | 62179UL, 536505093UL, 3747894147UL, 1711714600UL, 4131587422UL, 2293908590U | |
| | | L, 91444490UL, 628576944UL, 3370678255UL, 2408189350UL, 1671484924UL, 11911 | |
| | | 82242UL, 1565180119UL, 3786239592UL, 1338069254UL, 3411094744UL, 953365104U | |
| | | L, 3104638527UL, 3659647225UL, 631497759UL, 2000630022UL, 1921384447UL, 843 | |
| | | 719355UL, 3364831282UL, 3280318959UL, 3293926122UL, 483396548UL, 3848838894 | |
| | | UL, 3131266478UL, 1777918163UL, 146876953UL, 3279827269UL, 2644461735UL, 41 | |
| | | 56372383UL, 2786264663UL, 3976156700UL, 1708974143UL, 1878547888UL, 2168041 | |
| | | 590UL, 2793923820UL, 1999154400UL, 1510428126UL, 716404149UL, 3392113666UL, | |
| | | 3102243824UL, 2479551429UL, 651997355UL, 282914223UL, 2085613514UL, 376770 | |
| | | 2588UL, 1275979651UL, 1971798134UL, 978904005UL, 503506384UL, 1951622548UL, | |
| | | 300654823UL, 2565125471UL, 2782642813UL, 1458431750UL, 2186817324UL, 31239 | |
| | | 73648UL, 3491609UL, 1319592872UL, 452433679UL, 1075424534UL, 1437913158UL, | |
| | | 2828944016UL, 3301346775UL, 2333281307UL, 207992406UL, 2153172585UL, 266445 | |
| | | 0619UL, 691091062UL, 3553502652UL, 3114255216UL, 17137237UL, 2197910648UL, | |
| | | 2310382370UL, 3153689868UL, 2548008553UL, 802137134UL, 2272766890UL, 311223 | |
| | | 94UL, 468580641UL, 4069482373UL, 3909775167UL, 2468361281UL, 2982007782UL, | |
| | | 1445286890UL, 2807823912UL, 3319492525UL, 672057044UL, 2108452841UL, 175557 | |
| | | 7669UL, 12831353UL, 1878151473UL, 805157552UL, 1271858417UL, 2623540912UL, | |
| | | 461887094UL, 3215921223UL, 751773221UL, 1517221627UL, 3922191946UL, 1317394 | |
| | | 918UL, 361845001UL, 640541709UL, 421086624UL, 2173849516UL, 3385587450UL, 2 | |
| | | 655936801UL, 773140636UL, 2158026018UL, 1085377158UL, 1475601973UL, 5872029 | |
| | | 71UL, 736117181UL, 2456162322UL, 2158960374UL, 911455077UL, 2517883370UL, 2 | |
| | | 68231205UL, 541474497UL, 943191315UL, 1500275507UL, 2037251305UL, 725960194 | |
| | | UL, 2044207227UL, 2481150802UL, 2036633808UL, 2665008587UL, 3575011225UL, 1 | |
| | | 553111865UL, 2301231777UL, 87868216UL, 2163964019UL, 598195098UL, 213988271 | |
| | | 9UL, 2007840238UL, 517907301UL, 2788202059UL, 223311969UL, 2622859522UL, 15 | |
| | | 1920263UL, 3926046234UL, 740986174UL, 3563756446UL, 2886083960UL, 133893792 | |
| | | 8UL, 1911066215UL, 2426323587UL, 625828138UL, 2991888140UL, 2652286195UL, 2 | |
| | | 031853435UL, 1831149435UL, 1427632724UL, 1345475301UL, 289801789UL, 3509674 | |
| | | 153UL, 3977153945UL, 185662179UL, 536505093UL, 2727322952UL, 3980498348UL, | |
| | | 2529622213UL, 1903052964UL, 3564714651UL, 2281240568UL, 533384122UL, 277613 | |
| | | 480UL, 1815540358UL, 282763841UL, 3669112623UL, 2572859425UL, 195220178UL, | |
| | | 1210883545UL, 2359703600UL, 1187537824UL, 675732974UL, 325036095UL, 7080914 | |
| | | 65UL, 2556854604UL, 701006284UL, 2378459191UL, 1863513103UL, 2690918197UL, | |
| | | 4237307694UL, 1356483501UL, 2160905652UL, 521809106UL, 974368613UL, 3136010 | |
| | | 957UL, 2722488678UL, 3711515637UL, 2296341459UL, 4233729945UL, 1196247571UL | |
| | | , 3031398071UL, 515543502UL, 1314129776UL, 3235373306UL, 1303165859UL, 1820 | |
| | | 568009UL, 559099351UL, 186876368UL, 1076102111UL, 1218809551UL, 1790301111U | |
| | | L, 4130210229UL, 768125358UL, 1132864749UL, 4262563773UL, 2294411020UL, 409 | |
| | | 2943985UL, 2558108246UL, 3737664949UL, 2219923393UL, 724326159UL, 413410568 | |
| | | 2UL, 4188752746UL, 3615233671UL, 1526018731UL, 2281637916UL, 2459490295UL, | |
| | | 3637342666UL, 777862587UL, 39962002UL, 3772005832UL, 997473319UL, 574843584 | |
| | | UL, 3356551974UL, 1265234427UL, 1698059437UL, 534747571UL, 1465532164UL, 32 | |
| | | 63029035UL, 534512444UL, 2343092827UL, 2375685652UL, 2497926141UL, 23779336 | |
| | | 21UL, 2212335180UL, 261114084UL, 172755755UL, 2737085495UL, 2225257145UL, 1 | |
| | | 48605658UL, 1353911796UL, 357753009UL, 1778732943UL, 497635558UL, 413646797 | |
| | | 6UL, 2837964962UL, 4045039047UL, 2485296762UL, 1587587183UL, 4042904168UL, | |
| | | 3184240963UL, 2393293696UL, 915444966UL, 2299938515UL, 3351580749UL, 506575 | |
| | | 598UL, 1541916825UL, 3465300401UL, 525927458UL, 681152801UL, 331660975UL, 3 | |
| | | 624685846UL, 2994172100UL, 3274369082UL, 3638287602UL, 815689760UL, 1710961 | |
| | | 092UL, 2775607076UL, 2175058103UL, 3252688367UL, 2936890483UL, 2746319120UL | |
| | | , 2736754UL, 1646031035UL, 2448701214UL, 2886833213UL, 3689830606UL, 329279 | |
| | | 8106UL, 300773646UL, 3125160783UL, 1247453205UL, 2746275624UL, 4011063775UL | |
| | | , 904135764UL, 876847374UL, 366267234UL, 2541269205UL, 131376648UL, 1805948 | |
| | | 133UL, 3383589530UL, 2350119829UL, 2513170439UL, 4096158499UL, 4229211520UL | |
| | | , 2992048272UL, 1338522080UL, 1187391335UL, 2898563453UL, 2163088451UL, 141 | |
| | | 7971677UL, 2047421551UL, 902282791UL, 1143943232UL, 3568431811UL, 405986199 | |
| | | 3UL, 193362198UL, 2509297125UL, 3968551582UL, 2175686117UL, 3568936881UL, 1 | |
| | | 853177468UL, 2134063169UL, 2919389416UL, 1124914545UL, 1209806738UL, }, {11 | |
| | | 99972651UL, 1035834631UL, 3177798370UL, 860834162UL, 3741677748UL, 37803278 | |
| | | 29UL, 1693730265UL, 1643429511UL, 559568669UL, 2758650294UL, 647308222UL, 3 | |
| | | 901603996UL, 1778653821UL, 3618523672UL, 2154201067UL, 4261179460UL, 328576 | |
| | | 4480UL, 3334002738UL, 3215795953UL, 91368462UL, 1883994950UL, 1506873376UL, | |
| | | 1527780962UL, 4046354597UL, 4081676034UL, 2389066602UL, 1574939945UL, 4278 | |
| | | 45396UL, 2714836263UL, 1259019491UL, 2493238133UL, 2584034689UL, 3151382431 | |
| | | UL, 2171033919UL, 176883719UL, 2031844862UL, 1272380790UL, 1298975901UL, 40 | |
| | | 87222847UL, 1524000054UL, 311436877UL, 3627785554UL, 1889491722UL, 29380691 | |
| | | 93UL, 2771940687UL, 2756955968UL, 4289348777UL, 263514583UL, 887207028UL, 3 | |
| | | 522902525UL, 2273246349UL, 835377715UL, 2897243319UL, 204645450UL, 17759119 | |
| | | 83UL, 639470242UL, 2856296318UL, 3032942383UL, 2845501282UL, 1979082575UL, | |
| | | 202834023UL, 1876303820UL, 1434703409UL, 4240524132UL, 848853780UL, 4188621 | |
| | | 628UL, 928095314UL, 876412914UL, 3446576392UL, 3235688990UL, 4021419931UL, | |
| | | 2483628986UL, 3155781890UL, 399997246UL, 1642535200UL, 3872575068UL, 157795 | |
| | | 6550UL, 3606228634UL, 609914462UL, 653194726UL, 4048067248UL, 2500767965UL, | |
| | | 1125167825UL, 3707628088UL, 1819135158UL, 1875618971UL, 3865851141UL, 3282 | |
| | | 15079UL, 1695889194UL, 2040280471UL, 3384684457UL, 2540504961UL, 293050253U | |
| | | L, 525570078UL, 2655676443UL, 1392199429UL, 3370444585UL, 1937915855UL, 222 | |
| | | 9636250UL, 247937142UL, 2534538765UL, 365841057UL, 2449431033UL, 2456532429 | |
| | | UL, 101910696UL, 1247069485UL, 1523958293UL, 2473285670UL, 473709728UL, 302 | |
| | | 6667113UL, 2071968844UL, 324025193UL, 423064436UL, 3870800061UL, 3977393138 | |
| | | UL, 3632553233UL, 352757977UL, 1584833348UL, 3173248650UL, 1159857686UL, 15 | |
| | | 01841977UL, 1751860798UL, 617281070UL, 1958012761UL, 4031667102UL, 32321423 | |
| | | 21UL, 3087428595UL, 2380824676UL, 1194087757UL, 1542961747UL, 4163350364UL, | |
| | | 1721646249UL, 1672791861UL, 2900511710UL, 24973500UL, 1705444176UL, 713642 | |
| | | 505UL, 3017719513UL, 2090715200UL, 3521434070UL, 37117223UL, 1948295454UL, | |
| | | 3055840561UL, 3476120789UL, 3994249388UL, 527899063UL, 4285770666UL, 107552 | |
| | | 4023UL, 2594223535UL, 392943522UL, 171012646UL, 3515750082UL, 3414659054UL, | |
| | | 3501852926UL, 1493283737UL, 2662104279UL, 2033464928UL, 90134967UL, 363058 | |
| | | 647UL, 3289266998UL, 2470752727UL, 1199972651UL, 1035834631UL, 3177798370UL | |
| | | , 860834162UL, 1791097822UL, 3780327829UL, 1693730265UL, 1643429511UL, 5595 | |
| | | 68669UL, 3503319486UL, 647308222UL, 3901603996UL, 1778653821UL, 3618523672U | |
| | | L, 4294594427UL, 4261179460UL, 3285764480UL, 3334002738UL, 3215795953UL, 21 | |
| | | 2518363UL, 1883994950UL, 1506873376UL, 1527780962UL, 4046354597UL, 23986556 | |
| | | 00UL, 2389066602UL, 1574939945UL, 427845396UL, 2714836263UL, 2744363872UL, | |
| | | 2493238133UL, 2584034689UL, 3151382431UL, 2171033919UL, 2787053497UL, 20318 | |
| | | 44862UL, 1272380790UL, 1298975901UL, 4087222847UL, 2342953154UL, 311436877U | |
| | | L, 3627785554UL, 1889491722UL, 2938069193UL, 2026656505UL, 2756955968UL, 42 | |
| | | 89348777UL, 263514583UL, 887207028UL, 2097276163UL, 2273246349UL, 835377715 | |
| | | UL, 2897243319UL, 204645450UL, 4233399907UL, 639470242UL, 2856296318UL, 303 | |
| | | 2942383UL, 2845501282UL, 28260330UL, 202834023UL, 1876303820UL, 1434703409U | |
| | | L, 4240524132UL, 2455670466UL, 4188621628UL, 928095314UL, 876412914UL, 3446 | |
| | | 576392UL, 117581687UL, 4021419931UL, 2483628986UL, 3155781890UL, 399997246U | |
| | | L, 4254101087UL, 3872575068UL, 1577956550UL, 3606228634UL, 609914462UL, 400 | |
| | | 3279048UL, 4048067248UL, 2500767965UL, 1125167825UL, 3707628088UL, 92202051 | |
| | | 5UL, 1875618971UL, 3865851141UL, 328215079UL, 1695889194UL, 625773097UL, 33 | |
| | | 84684457UL, 2540504961UL, 293050253UL, 525570078UL, 2592805114UL, 139219942 | |
| | | 9UL, 3370444585UL, 1937915855UL, 2229636250UL, 3190958614UL, 2534538765UL, | |
| | | 365841057UL, 2449431033UL, 2456532429UL, 3778669305UL, 1247069485UL, 152395 | |
| | | 8293UL, 2473285670UL, 473709728UL, 720895889UL, 2071968844UL, 324025193UL, | |
| | | 423064436UL, 3870800061UL, 3535536111UL, 3632553233UL, 352757977UL, 1584833 | |
| | | 348UL, 3173248650UL, 2649344603UL, 1501841977UL, 1751860798UL, 617281070UL, | |
| | | 1958012761UL, 778965559UL, 3232142321UL, 3087428595UL, 2380824676UL, 11940 | |
| | | 87757UL, 3880222002UL, 4163350364UL, 1721646249UL, 1672791861UL, 2900511710 | |
| | | UL, 702936770UL, 1705444176UL, 713642505UL, 3017719513UL, 2090715200UL, 147 | |
| | | 7858694UL, 37117223UL, 1948295454UL, 3055840561UL, 3476120789UL, 464173532U | |
| | | L, 527899063UL, 4285770666UL, 1075524023UL, 2594223535UL, 2872629966UL, 171 | |
| | | 012646UL, 3515750082UL, 3414659054UL, 3501852926UL, 1631555059UL, 266210427 | |
| | | 9UL, 2033464928UL, 90134967UL, 363058647UL, 4112991722UL, 2470752727UL, 119 | |
| | | 9972651UL, 1035834631UL, 3177798370UL, 4152098951UL, 1791097822UL, 37803278 | |
| | | 29UL, 1693730265UL, 1643429511UL, 153020604UL, 3503319486UL, 647308222UL, 3 | |
| | | 901603996UL, 1778653821UL, 221887019UL, 4294594427UL, 4261179460UL, 3285764 | |
| | | 480UL, 3334002738UL, 3340918862UL, 212518363UL, 1883994950UL, 1506873376UL, | |
| | | 1527780962UL, 430180116UL, 2398655600UL, 2389066602UL, 1574939945UL, 42784 | |
| | | 5396UL, 1683639957UL, 2744363872UL, 2493238133UL, 2584034689UL, 3151382431U | |
| | | L, 752704472UL, 2787053497UL, 2031844862UL, 1272380790UL, 1298975901UL, 152 | |
| | | 8220628UL, 2342953154UL, 311436877UL, 3627785554UL, 1889491722UL, 257649546 | |
| | | 7UL, 2026656505UL, 2756955968UL, 4289348777UL, 263514583UL, 3778019638UL, 2 | |
| | | 097276163UL, 2273246349UL, 835377715UL, 2897243319UL, 1060067446UL, 4233399 | |
| | | 907UL, 639470242UL, 2856296318UL, 3032942383UL, 2351047932UL, 28260330UL, 2 | |
| | | 02834023UL, 1876303820UL, 1434703409UL, 3094305336UL, 2455670466UL, 4188621 | |
| | | 628UL, 928095314UL, 876412914UL, 3785385583UL, 117581687UL, 4021419931UL, 2 | |
| | | 483628986UL, 3155781890UL, 1867816730UL, 4254101087UL, 3872575068UL, 157795 | |
| | | 6550UL, 3606228634UL, 3081878598UL, 4003279048UL, 4048067248UL, 2500767965U | |
| | | L, 1125167825UL, 928465955UL, 922020515UL, 1875618971UL, 3865851141UL, 3282 | |
| | | 15079UL, 173810260UL, 625773097UL, 3384684457UL, 2540504961UL, 293050253UL, | |
| | | 2645143254UL, 2592805114UL, 1392199429UL, 3370444585UL, 1937915855UL, 1627 | |
| | | 81360UL, 3190958614UL, 2534538765UL, 365841057UL, 2449431033UL, 3105377832U | |
| | | L, 3778669305UL, 1247069485UL, 1523958293UL, 2473285670UL, 800971948UL, 720 | |
| | | 895889UL, 2071968844UL, 324025193UL, 423064436UL, 52577992UL, 3535536111UL, | |
| | | 3632553233UL, 352757977UL, 1584833348UL, 3305908059UL, 2649344603UL, 15018 | |
| | | 41977UL, 1751860798UL, 617281070UL, 264880505UL, 778965559UL, 3232142321UL, | |
| | | 3087428595UL, 2380824676UL, 1127761012UL, 3880222002UL, 4163350364UL, 1721 | |
| | | 646249UL, 1672791861UL, 2368512339UL, 702936770UL, 1705444176UL, 713642505U | |
| | | L, 3017719513UL, 197200752UL, 1477858694UL, 37117223UL, 1948295454UL, 30558 | |
| | | 40561UL, 1588372042UL, 464173532UL, 527899063UL, 4285770666UL, 1075524023UL | |
| | | , 2124039914UL, 2872629966UL, 171012646UL, 3515750082UL, 3414659054UL, 8185 | |
| | | 71456UL, 1631555059UL, 2662104279UL, 2033464928UL, 90134967UL, 952712086UL, | |
| | | 4112991722UL, 2470752727UL, 1199972651UL, 1035834631UL, 888975816UL, 41520 | |
| | | 98951UL, 1791097822UL, 3780327829UL, 1693730265UL, 3406785510UL, 153020604U | |
| | | L, 3503319486UL, 647308222UL, 3901603996UL, 3753248472UL, 221887019UL, 4294 | |
| | | 594427UL, 4261179460UL, 3285764480UL, 1861431346UL, 3340918862UL, 212518363 | |
| | | UL, 1883994950UL, 1506873376UL, 2695939612UL, 430180116UL, 2398655600UL, 23 | |
| | | 89066602UL, 1574939945UL, 2852159074UL, 1683639957UL, 2744363872UL, 2493238 | |
| | | 133UL, 2584034689UL, 1952065633UL, 752704472UL, 2787053497UL, 2031844862UL, | |
| | | 1272380790UL, 3530505866UL, 1528220628UL, 2342953154UL, 311436877UL, 36277 | |
| | | 85554UL, 3410473245UL, 2576495467UL, 2026656505UL, 2756955968UL, 4289348777 | |
| | | UL, 2856163034UL, 3778019638UL, 2097276163UL, 2273246349UL, 835377715UL, 31 | |
| | | 27280755UL, 1060067446UL, 4233399907UL, 639470242UL, 2856296318UL, 26157750 | |
| | | 11UL, 2351047932UL, 28260330UL, 202834023UL, 1876303820UL, 619308202UL, 309 | |
| | | 4305336UL, 2455670466UL, 4188621628UL, 928095314UL, 3764894047UL, 378538558 | |
| | | 3UL, 117581687UL, 4021419931UL, 2483628986UL, 3759839215UL, 1867816730UL, 4 | |
| | | 254101087UL, 3872575068UL, 1577956550UL, 1687107439UL, 3081878598UL, 400327 | |
| | | 9048UL, 4048067248UL, 2500767965UL, 2804044146UL, 928465955UL, 922020515UL, | |
| | | 1875618971UL, 3865851141UL, 2359176389UL, 173810260UL, 625773097UL, 338468 | |
| | | 4457UL, 2540504961UL, 3665420733UL, 2645143254UL, 2592805114UL, 1392199429U | |
| | | L, 3370444585UL, 1604709429UL, 162781360UL, 3190958614UL, 2534538765UL, 365 | |
| | | 841057UL, 3843585067UL, 3105377832UL, 3778669305UL, 1247069485UL, 152395829 | |
| | | 3UL, 293374051UL, 800971948UL, 720895889UL, 2071968844UL, 324025193UL, 3342 | |
| | | 361801UL, 52577992UL, 3535536111UL, 3632553233UL, 352757977UL, 1386594581UL | |
| | | , 3305908059UL, 2649344603UL, 1501841977UL, 1751860798UL, 3160423601UL, 264 | |
| | | 880505UL, 778965559UL, 3232142321UL, 3087428595UL, 3814775120UL, 1127761012 | |
| | | UL, 3880222002UL, 4163350364UL, 1721646249UL, 3640773034UL, 2368512339UL, 7 | |
| | | 02936770UL, 1705444176UL, 713642505UL, 1717761787UL, 197200752UL, 147785869 | |
| | | 4UL, 37117223UL, 1948295454UL, 896215772UL, 1588372042UL, 464173532UL, 5278 | |
| | | 99063UL, 4285770666UL, 3441409029UL, 2124039914UL, 2872629966UL, 171012646U | |
| | | L, 3515750082UL, 2216687886UL, 818571456UL, 1631555059UL, 2662104279UL, 203 | |
| | | 3464928UL, 369438400UL, 329003658UL, 1503365029UL, 4215790910UL, 3264377550 | |
| | | UL, 733526983UL, 2935318632UL, 1792331479UL, 608347530UL, 392723097UL, 1330 | |
| | | 445854UL, 3473004271UL, 1267636682UL, 2150566972UL, 2664910943UL, 259186163 | |
| | | 7UL, 409769584UL, 2943326880UL, 3746302819UL, 3162268832UL, 1028663260UL, 3 | |
| | | 206607045UL, 832105292UL, 2119405275UL, 538318455UL, 2981192295UL, 86177541 | |
| | | 6UL, 609718403UL, 3531204230UL, 1904759571UL, 1262633751UL, 2375133081UL, 4 | |
| | | 60454984UL, 946700253UL, 3763898311UL, 1571175213UL, 3124410107UL, 24134202 | |
| | | 16UL, 2664177543UL, 3241803820UL, 3968067371UL, 1234860999UL, 1130471500UL, | |
| | | 772727786UL, 247203117UL, 576455235UL, 246297007UL, 2027348597UL, 76493388 | |
| | | 7UL, 3812479771UL, 1825807084UL, 4072281412UL, 2156865781UL, 1286484847UL, | |
| | | 1966749063UL, 2479269303UL, 423506843UL, 3070938758UL, 653091413UL, 2267423 | |
| | | 132UL, 2004263526UL, 1374490719UL, 3871990628UL, 841138314UL, 1260317857UL, | |
| | | 3887432433UL, 4025147569UL, 764233331UL, 1794763428UL, 3005903468UL, 87792 | |
| | | 6770UL, 2466593927UL, 2971729561UL, 3203070565UL, 4198500026UL, 815665759UL | |
| | | , 2434508139UL, 1840456368UL, 2279000427UL, 17077200UL, 3178380570UL, 99030 | |
| | | 4199UL, 3578008580UL, 1965763660UL, 1640352477UL, 750159594UL, 2047409402UL | |
| | | , 3576308245UL, 544920564UL, 1730124869UL, 1194761386UL, 3280315505UL, 1473 | |
| | | 34027UL, 2870674244UL, 2076860776UL, 1100947675UL, 2482772161UL, 401966468U | |
| | | L, 1610650855UL, 193868446UL, 3808157106UL, 1509130117UL, 1324484736UL, 385 | |
| | | 2893217UL, 1059179497UL, 4053543778UL, 2557844172UL, 3282312002UL, 68255005 | |
| | | 8UL, 4281899173UL, 137171998UL, 3239159214UL, 2258610918UL, 426724741UL, 35 | |
| | | 02660993UL, 135977383UL, 429929363UL, 3984458137UL, 964026748UL, 2182019070 | |
| | | UL, 3836562946UL, 515026869UL, 359030455UL, 1301694917UL, 2300414803UL, 236 | |
| | | 4654981UL, 3804876710UL, 171119249UL, 2646785698UL, 4283509387UL, 362808776 | |
| | | 3UL, 1748227044UL, 3037141234UL, 3000413256UL, 23007314UL, 3598880509UL, 41 | |
| | | 60517314UL, 112205578UL, 1677675411UL, 734881643UL, 2830770338UL, 347031714 | |
| | | 5UL, 3306806569UL, 2635040943UL, 2671367560UL, 3528996498UL, 3878886478UL, | |
| | | 3114253828UL, 2721384408UL, 3175226991UL, 1393767271UL, 2651623266UL, 37679 | |
| | | 78376UL, 1269699398UL, 1100964192UL, 4169085845UL, 2086718107UL, 1286251099 | |
| | | UL, 764751784UL, 3006878591UL, }, | |
| | | }; | |
| | | | |
| #endif // CURAND_XORWOW_PRECALCULATED_H_ | | #endif // CURAND_XORWOW_PRECALCULATED_H_ | |
| | | | |
End of changes. 1 change blocks. |
| 0 lines changed or deleted | | 2062 lines changed or added | |
|
| texture_fetch_functions.h | | texture_fetch_functions.h | |
| /* | | /* | |
|
| * Copyright 1993-2010 NVIDIA Corporation. All rights reserved. | | * Copyright 1993-2011 NVIDIA Corporation. All rights reserved. | |
| * | | * | |
|
| * NOTICE TO USER: | | * NOTICE TO LICENSEE: | |
| * | | * | |
|
| * This source code is subject to NVIDIA ownership rights under U.S. and | | * This source code and/or documentation ("Licensed Deliverables") are | |
| * international Copyright laws. Users and possessors of this source code | | * subject to NVIDIA intellectual property rights under U.S. and | |
| * are hereby granted a nonexclusive, royalty-free license to use this code | | * international Copyright laws. | |
| * in individual and commercial software. | | | |
| * | | * | |
|
| * NVIDIA MAKES NO REPRESENTATION ABOUT THE SUITABILITY OF THIS SOURCE | | * These Licensed Deliverables contained herein is PROPRIETARY and | |
| * CODE FOR ANY PURPOSE. IT IS PROVIDED "AS IS" WITHOUT EXPRESS OR | | * CONFIDENTIAL to NVIDIA and is being provided under the terms and | |
| * IMPLIED WARRANTY OF ANY KIND. NVIDIA DISCLAIMS ALL WARRANTIES WITH | | * conditions of a form of NVIDIA software license agreement by and | |
| * REGARD TO THIS SOURCE CODE, INCLUDING ALL IMPLIED WARRANTIES OF | | * between NVIDIA and Licensee ("License Agreement") or electronically | |
| * MERCHANTABILITY, NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE. | | * accepted by Licensee. Notwithstanding any terms or conditions to | |
| * IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL | | * the contrary in the License Agreement, reproduction or disclosure | |
| , | | * of the Licensed Deliverables to any third party without the express | |
| * OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS | | * written consent of NVIDIA is prohibited. | |
| * OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE | | | |
| * OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE | | | |
| * OR PERFORMANCE OF THIS SOURCE CODE. | | | |
| * | | * | |
|
| * U.S. Government End Users. This source code is a "commercial item" as | | * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE | |
| * that term is defined at 48 C.F.R. 2.101 (OCT 1995), consisting of | | * LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE | |
| * "commercial computer software" and "commercial computer software | | * SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. IT IS | |
| * documentation" as such terms are used in 48 C.F.R. 12.212 (SEPT 1995) | | * PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND. | |
| * and is provided to the U.S. Government only as a commercial end item. | | * NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED | |
| * Consistent with 48 C.F.R.12.212 and 48 C.F.R. 227.7202-1 through | | * DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY, | |
| * 227.7202-4 (JUNE 1995), all U.S. Government End Users acquire the | | * NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE. | |
| * source code with only those rights set forth herein. | | * NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE | |
| | | * LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY | |
| | | * SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY | |
| | | * DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, | |
| | | * WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS | |
| | | * ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE | |
| | | * OF THESE LICENSED DELIVERABLES. | |
| * | | * | |
|
| * Any use of this source code in individual and commercial software must | | * U.S. Government End Users. These Licensed Deliverables are a | |
| * include, in the user documentation and internal comments to the code, | | * "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT | |
| * the above Disclaimer and U.S. Government End Users Notice. | | * 1995), consisting of "commercial computer software" and "commercial | |
| | | * computer software documentation" as such terms are used in 48 | |
| | | * C.F.R. 12.212 (SEPT 1995) and is provided to the U.S. Government | |
| | | * only as a commercial end item. Consistent with 48 C.F.R.12.212 and | |
| | | * 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all | |
| | | * U.S. Government End Users acquire the Licensed Deliverables with | |
| | | * only those rights set forth herein. | |
| | | * | |
| | | * Any use of the Licensed Deliverables in individual and commercial | |
| | | * software must include, in the user documentation and internal | |
| | | * comments to the code, the above Disclaimer and U.S. Government End | |
| | | * Users Notice. | |
| */ | | */ | |
| | | | |
| #if !defined(__TEXTURE_FETCH_FUNCTIONS_H__) | | #if !defined(__TEXTURE_FETCH_FUNCTIONS_H__) | |
| #define __TEXTURE_FETCH_FUNCTIONS_H__ | | #define __TEXTURE_FETCH_FUNCTIONS_H__ | |
| | | | |
| #if defined(__cplusplus) && defined(__CUDACC__) | | #if defined(__cplusplus) && defined(__CUDACC__) | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| | | | |
| skipping to change at line 61 | | skipping to change at line 75 | |
| #include "vector_functions.h" | | #include "vector_functions.h" | |
| #include "vector_types.h" | | #include "vector_types.h" | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
| /*DEVICE_BUILTIN*/ | | /*DEVICE_BUILTIN*/ | |
|
| template<class T, enum cudaTextureReadMode readMode> extern __device__ uin
t4 __utexfetchi(texture<T, 1, readMode> t, int4 i); | | template<class T, enum cudaTextureReadMode readMode> extern __device__ uin
t4 __utexfetchi(texture<T, cudaTextureType1D, readMode> t, int4 i); | |
| /*DEVICE_BUILTIN*/ | | /*DEVICE_BUILTIN*/ | |
|
| template<class T, enum cudaTextureReadMode readMode> extern __device__ in
t4 __itexfetchi(texture<T, 1, readMode> t, int4 i); | | template<class T, enum cudaTextureReadMode readMode> extern __device__ in
t4 __itexfetchi(texture<T, cudaTextureType1D, readMode> t, int4 i); | |
| /*DEVICE_BUILTIN*/ | | /*DEVICE_BUILTIN*/ | |
|
| template<class T, enum cudaTextureReadMode readMode> extern __device__ floa
t4 __ftexfetchi(texture<T, 1, readMode> t, int4 i); | | template<class T, enum cudaTextureReadMode readMode> extern __device__ floa
t4 __ftexfetchi(texture<T, cudaTextureType1D, readMode> t, int4 i); | |
| | | | |
| /*DEVICE_BUILTIN*/ | | /*DEVICE_BUILTIN*/ | |
|
| template<class T, int dim, enum cudaTextureReadMode readMode> extern __devi
ce__ uint4 __utexfetch(texture<T, dim, readMode> t, float4 i, int d = dim)
; | | template<class T, int texType, enum cudaTextureReadMode readMode> extern __
device__ uint4 __utexfetch(texture<T, texType, readMode> t, float4 i, int
d = texType); | |
| /*DEVICE_BUILTIN*/ | | /*DEVICE_BUILTIN*/ | |
|
| template<class T, int dim, enum cudaTextureReadMode readMode> extern __devi
ce__ int4 __itexfetch(texture<T, dim, readMode> t, float4 i, int d = dim)
; | | template<class T, int texType, enum cudaTextureReadMode readMode> extern __
device__ int4 __itexfetch(texture<T, texType, readMode> t, float4 i, int
d = texType); | |
| /*DEVICE_BUILTIN*/ | | /*DEVICE_BUILTIN*/ | |
|
| template<class T, int dim, enum cudaTextureReadMode readMode> extern __devi | | template<class T, int texType, enum cudaTextureReadMode readMode> extern __ | |
| ce__ float4 __ftexfetch(texture<T, dim, readMode> t, float4 i, int d = dim) | | device__ float4 __ftexfetch(texture<T, texType, readMode> t, float4 i, int | |
| ; | | d = texType); | |
| | | | |
| | | /*DEVICE_BUILTIN*/ | |
| | | template<class T, int texType, enum cudaTextureReadMode readMode> extern __ | |
| | | device__ uint4 __utexfetchl(texture<T, texType, readMode> t, float4 i, int | |
| | | l, int d = (texType & 0xF)); | |
| | | /*DEVICE_BUILTIN*/ | |
| | | template<class T, int texType, enum cudaTextureReadMode readMode> extern __ | |
| | | device__ int4 __itexfetchl(texture<T, texType, readMode> t, float4 i, int | |
| | | l, int d = (texType & 0xF)); | |
| | | /*DEVICE_BUILTIN*/ | |
| | | template<class T, int texType, enum cudaTextureReadMode readMode> extern __ | |
| | | device__ float4 __ftexfetchl(texture<T, texType, readMode> t, float4 i, int | |
| | | l, int d = (texType & 0xF)); | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ char tex1Dfetch(texture<char, 1, cudaReadModeE
lementType> t, int x) | | static __inline__ __device__ char tex1Dfetch(texture<char, cudaTextureType1
D, cudaReadModeElementType> t, int x) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | | |
| #else | | | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
|
| #endif | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | | |
| return (char)v.x; | | return (char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ signed char tex1Dfetch(texture<signed char, 1,
cudaReadModeElementType> t, int x) | | static __inline__ __device__ signed char tex1Dfetch(texture<signed char, cu
daTextureType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (signed char)v.x; | | return (signed char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned char tex1Dfetch(texture<unsigned char
, 1, cudaReadModeElementType> t, int x) | | static __inline__ __device__ unsigned char tex1Dfetch(texture<unsigned char
, cudaTextureType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned char)v.x; | | return (unsigned char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char1 tex1Dfetch(texture<char1, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ char1 tex1Dfetch(texture<char1, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_char1(v.x); | | return make_char1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar1 tex1Dfetch(texture<uchar1, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ uchar1 tex1Dfetch(texture<uchar1, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_uchar1(v.x); | | return make_uchar1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char2 tex1Dfetch(texture<char2, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ char2 tex1Dfetch(texture<char2, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_char2(v.x, v.y); | | return make_char2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar2 tex1Dfetch(texture<uchar2, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ uchar2 tex1Dfetch(texture<uchar2, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_uchar2(v.x, v.y); | | return make_uchar2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex1Dfetch(texture<char4, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ char4 tex1Dfetch(texture<char4, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_char4(v.x, v.y, v.z, v.w); | | return make_char4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex1Dfetch(texture<uchar4, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ uchar4 tex1Dfetch(texture<uchar4, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_uchar4(v.x, v.y, v.z, v.w); | | return make_uchar4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ short tex1Dfetch(texture<short, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ short tex1Dfetch(texture<short, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (short)v.x; | | return (short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned short tex1Dfetch(texture<unsigned sho
rt, 1, cudaReadModeElementType> t, int x) | | static __inline__ __device__ unsigned short tex1Dfetch(texture<unsigned sho
rt, cudaTextureType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned short)v.x; | | return (unsigned short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short1 tex1Dfetch(texture<short1, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ short1 tex1Dfetch(texture<short1, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_short1(v.x); | | return make_short1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort1 tex1Dfetch(texture<ushort1, 1, cudaRea
dModeElementType> t, int x) | | static __inline__ __device__ ushort1 tex1Dfetch(texture<ushort1, cudaTextur
eType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_ushort1(v.x); | | return make_ushort1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short2 tex1Dfetch(texture<short2, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ short2 tex1Dfetch(texture<short2, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_short2(v.x, v.y); | | return make_short2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort2 tex1Dfetch(texture<ushort2, 1, cudaRea
dModeElementType> t, int x) | | static __inline__ __device__ ushort2 tex1Dfetch(texture<ushort2, cudaTextur
eType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_ushort2(v.x, v.y); | | return make_ushort2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex1Dfetch(texture<short4, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ short4 tex1Dfetch(texture<short4, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_short4(v.x, v.y, v.z, v.w); | | return make_short4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex1Dfetch(texture<ushort4, 1, cudaRea
dModeElementType> t, int x) | | static __inline__ __device__ ushort4 tex1Dfetch(texture<ushort4, cudaTextur
eType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_ushort4(v.x, v.y, v.z, v.w); | | return make_ushort4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ int tex1Dfetch(texture<int, 1, cudaReadModeEle
mentType> t, int x) | | static __inline__ __device__ int tex1Dfetch(texture<int, cudaTextureType1D,
cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (int)v.x; | | return (int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned int tex1Dfetch(texture<unsigned int,
1, cudaReadModeElementType> t, int x) | | static __inline__ __device__ unsigned int tex1Dfetch(texture<unsigned int,
cudaTextureType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned int)v.x; | | return (unsigned int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int1 tex1Dfetch(texture<int1, 1, cudaReadModeE
lementType> t, int x) | | static __inline__ __device__ int1 tex1Dfetch(texture<int1, cudaTextureType1
D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_int1(v.x); | | return make_int1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint1 tex1Dfetch(texture<uint1, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ uint1 tex1Dfetch(texture<uint1, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_uint1(v.x); | | return make_uint1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int2 tex1Dfetch(texture<int2, 1, cudaReadModeE
lementType> t, int x) | | static __inline__ __device__ int2 tex1Dfetch(texture<int2, cudaTextureType1
D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_int2(v.x, v.y); | | return make_int2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint2 tex1Dfetch(texture<uint2, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ uint2 tex1Dfetch(texture<uint2, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_uint2(v.x, v.y); | | return make_uint2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex1Dfetch(texture<int4, 1, cudaReadModeE
lementType> t, int x) | | static __inline__ __device__ int4 tex1Dfetch(texture<int4, cudaTextureType1
D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_int4(v.x, v.y, v.z, v.w); | | return make_int4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex1Dfetch(texture<uint4, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ uint4 tex1Dfetch(texture<uint4, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_uint4(v.x, v.y, v.z, v.w); | | return make_uint4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #if !defined(__LP64__) | | #if !defined(__LP64__) | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ long tex1Dfetch(texture<long, 1, cudaReadModeE
lementType> t, int x) | | static __inline__ __device__ long tex1Dfetch(texture<long, cudaTextureType1
D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (long)v.x; | | return (long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned long tex1Dfetch(texture<unsigned long
, 1, cudaReadModeElementType> t, int x) | | static __inline__ __device__ unsigned long tex1Dfetch(texture<unsigned long
, cudaTextureType1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned long)v.x; | | return (unsigned long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long1 tex1Dfetch(texture<long1, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ long1 tex1Dfetch(texture<long1, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_long1(v.x); | | return make_long1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong1 tex1Dfetch(texture<ulong1, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ ulong1 tex1Dfetch(texture<ulong1, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_ulong1(v.x); | | return make_ulong1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long2 tex1Dfetch(texture<long2, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ long2 tex1Dfetch(texture<long2, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_long2(v.x, v.y); | | return make_long2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong2 tex1Dfetch(texture<ulong2, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ ulong2 tex1Dfetch(texture<ulong2, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_ulong2(v.x, v.y); | | return make_ulong2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long4 tex1Dfetch(texture<long4, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ long4 tex1Dfetch(texture<long4, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_long4(v.x, v.y, v.z, v.w); | | return make_long4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong4 tex1Dfetch(texture<ulong4, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ ulong4 tex1Dfetch(texture<ulong4, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_ulong4(v.x, v.y, v.z, v.w); | | return make_ulong4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #endif /* !__LP64__ */ | | #endif /* !__LP64__ */ | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex1Dfetch(texture<float, 1, cudaReadMod
eElementType> t, int x) | | static __inline__ __device__ float tex1Dfetch(texture<float, cudaTextureTyp
e1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | | float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return v.x; | | return v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1Dfetch(texture<float1, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ float1 tex1Dfetch(texture<float1, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | | float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_float1(v.x); | | return make_float1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1Dfetch(texture<float2, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ float2 tex1Dfetch(texture<float2, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | | float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_float2(v.x, v.y); | | return make_float2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1Dfetch(texture<float4, 1, cudaReadM
odeElementType> t, int x) | | static __inline__ __device__ float4 tex1Dfetch(texture<float4, cudaTextureT
ype1D, cudaReadModeElementType> t, int x) | |
| { | | { | |
| float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | | float4 v = __ftexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | | |
| return make_float4(v.x, v.y, v.z, v.w); | | return make_float4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex1Dfetch(texture<char, 1, cudaReadMode
NormalizedFloat> t, int x) | | static __inline__ __device__ float tex1Dfetch(texture<char, cudaTextureType
1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | | |
| #else | | | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
|
| #endif | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex1Dfetch(texture<signed char, 1, cudaR
eadModeNormalizedFloat> t, int x) | | static __inline__ __device__ float tex1Dfetch(texture<signed char, cudaText
ureType1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex1Dfetch(texture<unsigned char, 1, cud
aReadModeNormalizedFloat> t, int x) | | static __inline__ __device__ float tex1Dfetch(texture<unsigned char, cudaTe
xtureType1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1Dfetch(texture<char1, 1, cudaReadMo
deNormalizedFloat> t, int x) | | static __inline__ __device__ float1 tex1Dfetch(texture<char1, cudaTextureTy
pe1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1Dfetch(texture<uchar1, 1, cudaReadM
odeNormalizedFloat> t, int x) | | static __inline__ __device__ float1 tex1Dfetch(texture<uchar1, cudaTextureT
ype1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1Dfetch(texture<char2, 1, cudaReadMo
deNormalizedFloat> t, int x) | | static __inline__ __device__ float2 tex1Dfetch(texture<char2, cudaTextureTy
pe1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1Dfetch(texture<uchar2, 1, cudaReadM
odeNormalizedFloat> t, int x) | | static __inline__ __device__ float2 tex1Dfetch(texture<uchar2, cudaTextureT
ype1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1Dfetch(texture<char4, 1, cudaReadMo
deNormalizedFloat> t, int x) | | static __inline__ __device__ float4 tex1Dfetch(texture<char4, cudaTextureTy
pe1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1Dfetch(texture<uchar4, 1, cudaReadM
odeNormalizedFloat> t, int x) | | static __inline__ __device__ float4 tex1Dfetch(texture<uchar4, cudaTextureT
ype1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex1Dfetch(texture<short, 1, cudaReadMod
eNormalizedFloat> t, int x) | | static __inline__ __device__ float tex1Dfetch(texture<short, cudaTextureTyp
e1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex1Dfetch(texture<unsigned short, 1, cu
daReadModeNormalizedFloat> t, int x) | | static __inline__ __device__ float tex1Dfetch(texture<unsigned short, cudaT
extureType1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1Dfetch(texture<short1, 1, cudaReadM
odeNormalizedFloat> t, int x) | | static __inline__ __device__ float1 tex1Dfetch(texture<short1, cudaTextureT
ype1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1Dfetch(texture<ushort1, 1, cudaRead
ModeNormalizedFloat> t, int x) | | static __inline__ __device__ float1 tex1Dfetch(texture<ushort1, cudaTexture
Type1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1Dfetch(texture<short2, 1, cudaReadM
odeNormalizedFloat> t, int x) | | static __inline__ __device__ float2 tex1Dfetch(texture<short2, cudaTextureT
ype1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1Dfetch(texture<ushort2, 1, cudaRead
ModeNormalizedFloat> t, int x) | | static __inline__ __device__ float2 tex1Dfetch(texture<ushort2, cudaTexture
Type1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1Dfetch(texture<short4, 1, cudaReadM
odeNormalizedFloat> t, int x) | | static __inline__ __device__ float4 tex1Dfetch(texture<short4, cudaTextureT
ype1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | | int4 v = __itexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1Dfetch(texture<ushort4, 1, cudaRead
ModeNormalizedFloat> t, int x) | | static __inline__ __device__ float4 tex1Dfetch(texture<ushort4, cudaTexture
Type1D, cudaReadModeNormalizedFloat> t, int x) | |
| { | | { | |
| uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | | uint4 v = __utexfetchi(t, make_int4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ char tex1D(texture<char, 1, cudaReadModeElemen
tType> t, float x) | | static __inline__ __device__ char tex1D(texture<char, cudaTextureType1D, cu
daReadModeElementType> t, float x) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | | |
| #else | | | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
|
| #endif | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | | |
| return (char)v.x; | | return (char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ signed char tex1D(texture<signed char, 1, cuda
ReadModeElementType> t, float x) | | static __inline__ __device__ signed char tex1D(texture<signed char, cudaTex
tureType1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (signed char)v.x; | | return (signed char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned char tex1D(texture<unsigned char, 1,
cudaReadModeElementType> t, float x) | | static __inline__ __device__ unsigned char tex1D(texture<unsigned char, cud
aTextureType1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned char)v.x; | | return (unsigned char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char1 tex1D(texture<char1, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ char1 tex1D(texture<char1, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_char1(v.x); | | return make_char1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar1 tex1D(texture<uchar1, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ uchar1 tex1D(texture<uchar1, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_uchar1(v.x); | | return make_uchar1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char2 tex1D(texture<char2, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ char2 tex1D(texture<char2, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_char2(v.x, v.y); | | return make_char2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar2 tex1D(texture<uchar2, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ uchar2 tex1D(texture<uchar2, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_uchar2(v.x, v.y); | | return make_uchar2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex1D(texture<char4, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ char4 tex1D(texture<char4, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_char4(v.x, v.y, v.z, v.w); | | return make_char4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex1D(texture<uchar4, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ uchar4 tex1D(texture<uchar4, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_uchar4(v.x, v.y, v.z, v.w); | | return make_uchar4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ short tex1D(texture<short, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ short tex1D(texture<short, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (short)v.x; | | return (short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned short tex1D(texture<unsigned short, 1
, cudaReadModeElementType> t, float x) | | static __inline__ __device__ unsigned short tex1D(texture<unsigned short, c
udaTextureType1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned short)v.x; | | return (unsigned short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short1 tex1D(texture<short1, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ short1 tex1D(texture<short1, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_short1(v.x); | | return make_short1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort1 tex1D(texture<ushort1, 1, cudaReadMode
ElementType> t, float x) | | static __inline__ __device__ ushort1 tex1D(texture<ushort1, cudaTextureType
1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_ushort1(v.x); | | return make_ushort1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short2 tex1D(texture<short2, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ short2 tex1D(texture<short2, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_short2(v.x, v.y); | | return make_short2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort2 tex1D(texture<ushort2, 1, cudaReadMode
ElementType> t, float x) | | static __inline__ __device__ ushort2 tex1D(texture<ushort2, cudaTextureType
1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_ushort2(v.x, v.y); | | return make_ushort2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex1D(texture<short4, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ short4 tex1D(texture<short4, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_short4(v.x, v.y, v.z, v.w); | | return make_short4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex1D(texture<ushort4, 1, cudaReadMode
ElementType> t, float x) | | static __inline__ __device__ ushort4 tex1D(texture<ushort4, cudaTextureType
1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_ushort4(v.x, v.y, v.z, v.w); | | return make_ushort4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ int tex1D(texture<int, 1, cudaReadModeElementT
ype> t, float x) | | static __inline__ __device__ int tex1D(texture<int, cudaTextureType1D, cuda
ReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (int)v.x; | | return (int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned int tex1D(texture<unsigned int, 1, cu
daReadModeElementType> t, float x) | | static __inline__ __device__ unsigned int tex1D(texture<unsigned int, cudaT
extureType1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned int)v.x; | | return (unsigned int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int1 tex1D(texture<int1, 1, cudaReadModeElemen
tType> t, float x) | | static __inline__ __device__ int1 tex1D(texture<int1, cudaTextureType1D, cu
daReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_int1(v.x); | | return make_int1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint1 tex1D(texture<uint1, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ uint1 tex1D(texture<uint1, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_uint1(v.x); | | return make_uint1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int2 tex1D(texture<int2, 1, cudaReadModeElemen
tType> t, float x) | | static __inline__ __device__ int2 tex1D(texture<int2, cudaTextureType1D, cu
daReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_int2(v.x, v.y); | | return make_int2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint2 tex1D(texture<uint2, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ uint2 tex1D(texture<uint2, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_uint2(v.x, v.y); | | return make_uint2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex1D(texture<int4, 1, cudaReadModeElemen
tType> t, float x) | | static __inline__ __device__ int4 tex1D(texture<int4, cudaTextureType1D, cu
daReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_int4(v.x, v.y, v.z, v.w); | | return make_int4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex1D(texture<uint4, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ uint4 tex1D(texture<uint4, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_uint4(v.x, v.y, v.z, v.w); | | return make_uint4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
| #if !defined(__LP64__) | | #if !defined(__LP64__) | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ long tex1D(texture<long, 1, cudaReadModeElemen
tType> t, float x) | | static __inline__ __device__ long tex1D(texture<long, cudaTextureType1D, cu
daReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (long)v.x; | | return (long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned long tex1D(texture<unsigned long, 1,
cudaReadModeElementType> t, float x) | | static __inline__ __device__ unsigned long tex1D(texture<unsigned long, cud
aTextureType1D, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return (unsigned long)v.x; | | return (unsigned long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long1 tex1D(texture<long1, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ long1 tex1D(texture<long1, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_long1(v.x); | | return make_long1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong1 tex1D(texture<ulong1, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ ulong1 tex1D(texture<ulong1, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_ulong1(v.x); | | return make_ulong1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long2 tex1D(texture<long2, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ long2 tex1D(texture<long2, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_long2(v.x, v.y); | | return make_long2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong2 tex1D(texture<ulong2, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ ulong2 tex1D(texture<ulong2, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_ulong2(v.x, v.y); | | return make_ulong2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long4 tex1D(texture<long4, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ long4 tex1D(texture<long4, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_long4(v.x, v.y, v.z, v.w); | | return make_long4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong4 tex1D(texture<ulong4, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ ulong4 tex1D(texture<ulong4, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_ulong4(v.x, v.y, v.z, v.w); | | return make_ulong4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #endif /* !__LP64__ */ | | #endif /* !__LP64__ */ | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex1D(texture<float, 1, cudaReadModeElem
entType> t, float x) | | static __inline__ __device__ float tex1D(texture<float, cudaTextureType1D,
cudaReadModeElementType> t, float x) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return v.x; | | return v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1D(texture<float1, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ float1 tex1D(texture<float1, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_float1(v.x); | | return make_float1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1D(texture<float2, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ float2 tex1D(texture<float2, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_float2(v.x, v.y); | | return make_float2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1D(texture<float4, 1, cudaReadModeEl
ementType> t, float x) | | static __inline__ __device__ float4 tex1D(texture<float4, cudaTextureType1D
, cudaReadModeElementType> t, float x) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | | |
| return make_float4(v.x, v.y, v.z, v.w); | | return make_float4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex1D(texture<char, 1, cudaReadModeNorma
lizedFloat> t, float x) | | static __inline__ __device__ float tex1D(texture<char, cudaTextureType1D, c
udaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | | |
| #else | | | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
|
| #endif | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex1D(texture<signed char, 1, cudaReadMo
deNormalizedFloat> t, float x) | | static __inline__ __device__ float tex1D(texture<signed char, cudaTextureTy
pe1D, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex1D(texture<unsigned char, 1, cudaRead
ModeNormalizedFloat> t, float x) | | static __inline__ __device__ float tex1D(texture<unsigned char, cudaTexture
Type1D, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1D(texture<char1, 1, cudaReadModeNor
malizedFloat> t, float x) | | static __inline__ __device__ float1 tex1D(texture<char1, cudaTextureType1D,
cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1D(texture<uchar1, 1, cudaReadModeNo
rmalizedFloat> t, float x) | | static __inline__ __device__ float1 tex1D(texture<uchar1, cudaTextureType1D
, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1D(texture<char2, 1, cudaReadModeNor
malizedFloat> t, float x) | | static __inline__ __device__ float2 tex1D(texture<char2, cudaTextureType1D,
cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1D(texture<uchar2, 1, cudaReadModeNo
rmalizedFloat> t, float x) | | static __inline__ __device__ float2 tex1D(texture<uchar2, cudaTextureType1D
, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1D(texture<char4, 1, cudaReadModeNor
malizedFloat> t, float x) | | static __inline__ __device__ float4 tex1D(texture<char4, cudaTextureType1D,
cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1D(texture<uchar4, 1, cudaReadModeNo
rmalizedFloat> t, float x) | | static __inline__ __device__ float4 tex1D(texture<uchar4, cudaTextureType1D
, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex1D(texture<short, 1, cudaReadModeNorm
alizedFloat> t, float x) | | static __inline__ __device__ float tex1D(texture<short, cudaTextureType1D,
cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex1D(texture<unsigned short, 1, cudaRea
dModeNormalizedFloat> t, float x) | | static __inline__ __device__ float tex1D(texture<unsigned short, cudaTextur
eType1D, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1D(texture<short1, 1, cudaReadModeNo
rmalizedFloat> t, float x) | | static __inline__ __device__ float1 tex1D(texture<short1, cudaTextureType1D
, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex1D(texture<ushort1, 1, cudaReadModeN
ormalizedFloat> t, float x) | | static __inline__ __device__ float1 tex1D(texture<ushort1, cudaTextureType1
D, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1D(texture<short2, 1, cudaReadModeNo
rmalizedFloat> t, float x) | | static __inline__ __device__ float2 tex1D(texture<short2, cudaTextureType1D
, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex1D(texture<ushort2, 1, cudaReadModeN
ormalizedFloat> t, float x) | | static __inline__ __device__ float2 tex1D(texture<ushort2, cudaTextureType1
D, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1D(texture<short4, 1, cudaReadModeNo
rmalizedFloat> t, float x) | | static __inline__ __device__ float4 tex1D(texture<short4, cudaTextureType1D
, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex1D(texture<ushort4, 1, cudaReadModeN
ormalizedFloat> t, float x) | | static __inline__ __device__ float4 tex1D(texture<ushort4, cudaTextureType1
D, cudaReadModeNormalizedFloat> t, float x) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, 0, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| * 2D Texture functions
* | | * 2D Texture functions
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ char tex2D(texture<char, 2, cudaReadModeElemen
tType> t, float x, float y) | | static __inline__ __device__ char tex2D(texture<char, cudaTextureType2D, cu
daReadModeElementType> t, float x, float y) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | | |
| #else | | | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
|
| #endif | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | | |
| return (char)v.x; | | return (char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ signed char tex2D(texture<signed char, 2, cuda
ReadModeElementType> t, float x, float y) | | static __inline__ __device__ signed char tex2D(texture<signed char, cudaTex
tureType2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (signed char)v.x; | | return (signed char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned char tex2D(texture<unsigned char, 2,
cudaReadModeElementType> t, float x, float y) | | static __inline__ __device__ unsigned char tex2D(texture<unsigned char, cud
aTextureType2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (unsigned char)v.x; | | return (unsigned char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char1 tex2D(texture<char1, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ char1 tex2D(texture<char1, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_char1(v.x); | | return make_char1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar1 tex2D(texture<uchar1, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ uchar1 tex2D(texture<uchar1, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_uchar1(v.x); | | return make_uchar1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char2 tex2D(texture<char2, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ char2 tex2D(texture<char2, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_char2(v.x, v.y); | | return make_char2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar2 tex2D(texture<uchar2, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ uchar2 tex2D(texture<uchar2, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_uchar2(v.x, v.y); | | return make_uchar2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex2D(texture<char4, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ char4 tex2D(texture<char4, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_char4(v.x, v.y, v.z, v.w); | | return make_char4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex2D(texture<uchar4, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ uchar4 tex2D(texture<uchar4, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_uchar4(v.x, v.y, v.z, v.w); | | return make_uchar4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ short tex2D(texture<short, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ short tex2D(texture<short, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (short)v.x; | | return (short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned short tex2D(texture<unsigned short, 2
, cudaReadModeElementType> t, float x, float y) | | static __inline__ __device__ unsigned short tex2D(texture<unsigned short, c
udaTextureType2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (unsigned short)v.x; | | return (unsigned short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short1 tex2D(texture<short1, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ short1 tex2D(texture<short1, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_short1(v.x); | | return make_short1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort1 tex2D(texture<ushort1, 2, cudaReadMode
ElementType> t, float x, float y) | | static __inline__ __device__ ushort1 tex2D(texture<ushort1, cudaTextureType
2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_ushort1(v.x); | | return make_ushort1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short2 tex2D(texture<short2, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ short2 tex2D(texture<short2, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_short2(v.x, v.y); | | return make_short2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort2 tex2D(texture<ushort2, 2, cudaReadMode
ElementType> t, float x, float y) | | static __inline__ __device__ ushort2 tex2D(texture<ushort2, cudaTextureType
2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_ushort2(v.x, v.y); | | return make_ushort2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex2D(texture<short4, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ short4 tex2D(texture<short4, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_short4(v.x, v.y, v.z, v.w); | | return make_short4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex2D(texture<ushort4, 2, cudaReadMode
ElementType> t, float x, float y) | | static __inline__ __device__ ushort4 tex2D(texture<ushort4, cudaTextureType
2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_ushort4(v.x, v.y, v.z, v.w); | | return make_ushort4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ int tex2D(texture<int, 2, cudaReadModeElementT
ype> t, float x, float y) | | static __inline__ __device__ int tex2D(texture<int, cudaTextureType2D, cuda
ReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (int)v.x; | | return (int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned int tex2D(texture<unsigned int, 2, cu
daReadModeElementType> t, float x, float y) | | static __inline__ __device__ unsigned int tex2D(texture<unsigned int, cudaT
extureType2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (unsigned int)v.x; | | return (unsigned int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int1 tex2D(texture<int1, 2, cudaReadModeElemen
tType> t, float x, float y) | | static __inline__ __device__ int1 tex2D(texture<int1, cudaTextureType2D, cu
daReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_int1(v.x); | | return make_int1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint1 tex2D(texture<uint1, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ uint1 tex2D(texture<uint1, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_uint1(v.x); | | return make_uint1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int2 tex2D(texture<int2, 2, cudaReadModeElemen
tType> t, float x, float y) | | static __inline__ __device__ int2 tex2D(texture<int2, cudaTextureType2D, cu
daReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_int2(v.x, v.y); | | return make_int2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint2 tex2D(texture<uint2, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ uint2 tex2D(texture<uint2, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_uint2(v.x, v.y); | | return make_uint2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex2D(texture<int4, 2, cudaReadModeElemen
tType> t, float x, float y) | | static __inline__ __device__ int4 tex2D(texture<int4, cudaTextureType2D, cu
daReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_int4(v.x, v.y, v.z, v.w); | | return make_int4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex2D(texture<uint4, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ uint4 tex2D(texture<uint4, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_uint4(v.x, v.y, v.z, v.w); | | return make_uint4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #if !defined(__LP64__) | | #if !defined(__LP64__) | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ long tex2D(texture<long, 2, cudaReadModeElemen
tType> t, float x, float y) | | static __inline__ __device__ long tex2D(texture<long, cudaTextureType2D, cu
daReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (long)v.x; | | return (long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned long tex2D(texture<unsigned long, 2,
cudaReadModeElementType> t, float x, float y) | | static __inline__ __device__ unsigned long tex2D(texture<unsigned long, cud
aTextureType2D, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return (unsigned long)v.x; | | return (unsigned long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long1 tex2D(texture<long1, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ long1 tex2D(texture<long1, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_long1(v.x); | | return make_long1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong1 tex2D(texture<ulong1, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ ulong1 tex2D(texture<ulong1, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_ulong1(v.x); | | return make_ulong1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long2 tex2D(texture<long2, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ long2 tex2D(texture<long2, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_long2(v.x, v.y); | | return make_long2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong2 tex2D(texture<ulong2, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ ulong2 tex2D(texture<ulong2, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_ulong2(v.x, v.y); | | return make_ulong2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long4 tex2D(texture<long4, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ long4 tex2D(texture<long4, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_long4(v.x, v.y, v.z, v.w); | | return make_long4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong4 tex2D(texture<ulong4, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ ulong4 tex2D(texture<ulong4, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_ulong4(v.x, v.y, v.z, v.w); | | return make_ulong4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #endif /* !__LP64__ */ | | #endif /* !__LP64__ */ | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex2D(texture<float, 2, cudaReadModeElem
entType> t, float x, float y) | | static __inline__ __device__ float tex2D(texture<float, cudaTextureType2D,
cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return v.x; | | return v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex2D(texture<float1, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ float1 tex2D(texture<float1, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_float1(v.x); | | return make_float1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex2D(texture<float2, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ float2 tex2D(texture<float2, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_float2(v.x, v.y); | | return make_float2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2D(texture<float4, 2, cudaReadModeEl
ementType> t, float x, float y) | | static __inline__ __device__ float4 tex2D(texture<float4, cudaTextureType2D
, cudaReadModeElementType> t, float x, float y) | |
| { | | { | |
| float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | | float4 v = __ftexfetch(t, make_float4(x, y, 0, 0)); | |
| | | | |
| return make_float4(v.x, v.y, v.z, v.w); | | return make_float4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex2D(texture<char, 2, cudaReadModeNorma
lizedFloat> t, float x, float y) | | static __inline__ __device__ float tex2D(texture<char, cudaTextureType2D, c
udaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | | |
| #else | | | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
|
| #endif | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex2D(texture<signed char, 2, cudaReadMo
deNormalizedFloat> t, float x, float y) | | static __inline__ __device__ float tex2D(texture<signed char, cudaTextureTy
pe2D, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex2D(texture<unsigned char, 2, cudaRead
ModeNormalizedFloat> t, float x, float y) | | static __inline__ __device__ float tex2D(texture<unsigned char, cudaTexture
Type2D, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex2D(texture<char1, 2, cudaReadModeNor
malizedFloat> t, float x, float y) | | static __inline__ __device__ float1 tex2D(texture<char1, cudaTextureType2D,
cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex2D(texture<uchar1, 2, cudaReadModeNo
rmalizedFloat> t, float x, float y) | | static __inline__ __device__ float1 tex2D(texture<uchar1, cudaTextureType2D
, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex2D(texture<char2, 2, cudaReadModeNor
malizedFloat> t, float x, float y) | | static __inline__ __device__ float2 tex2D(texture<char2, cudaTextureType2D,
cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex2D(texture<uchar2, 2, cudaReadModeNo
rmalizedFloat> t, float x, float y) | | static __inline__ __device__ float2 tex2D(texture<uchar2, cudaTextureType2D
, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2D(texture<char4, 2, cudaReadModeNor
malizedFloat> t, float x, float y) | | static __inline__ __device__ float4 tex2D(texture<char4, cudaTextureType2D,
cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2D(texture<uchar4, 2, cudaReadModeNo
rmalizedFloat> t, float x, float y) | | static __inline__ __device__ float4 tex2D(texture<uchar4, cudaTextureType2D
, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex2D(texture<short, 2, cudaReadModeNorm
alizedFloat> t, float x, float y) | | static __inline__ __device__ float tex2D(texture<short, cudaTextureType2D,
cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex2D(texture<unsigned short, 2, cudaRea
dModeNormalizedFloat> t, float x, float y) | | static __inline__ __device__ float tex2D(texture<unsigned short, cudaTextur
eType2D, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex2D(texture<short1, 2, cudaReadModeNo
rmalizedFloat> t, float x, float y) | | static __inline__ __device__ float1 tex2D(texture<short1, cudaTextureType2D
, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex2D(texture<ushort1, 2, cudaReadModeN
ormalizedFloat> t, float x, float y) | | static __inline__ __device__ float1 tex2D(texture<ushort1, cudaTextureType2
D, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex2D(texture<short2, 2, cudaReadModeNo
rmalizedFloat> t, float x, float y) | | static __inline__ __device__ float2 tex2D(texture<short2, cudaTextureType2D
, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex2D(texture<ushort2, 2, cudaReadModeN
ormalizedFloat> t, float x, float y) | | static __inline__ __device__ float2 tex2D(texture<ushort2, cudaTextureType2
D, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2D(texture<short4, 2, cudaReadModeNo
rmalizedFloat> t, float x, float y) | | static __inline__ __device__ float4 tex2D(texture<short4, cudaTextureType2D
, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | | int4 v = __itexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2D(texture<ushort4, 2, cudaReadModeN
ormalizedFloat> t, float x, float y) | | static __inline__ __device__ float4 tex2D(texture<ushort4, cudaTextureType2
D, cudaReadModeNormalizedFloat> t, float x, float y) | |
| { | | { | |
| uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | | uint4 v = __utexfetch(t, make_float4(x, y, 0, 0)); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
|
| * 3D Texture functions
* | | * 1D Layered Texture functions
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ char tex3D(texture<char, 3, cudaReadModeElemen
tType> t, float x, float y, float z) | | static __inline__ __device__ char tex1DLayered(texture<char, cudaTextureTyp
e1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| #else | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| #endif | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | | |
| return (char)v.x; | | return (char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ signed char tex3D(texture<signed char, 3, cuda
ReadModeElementType> t, float x, float y, float z) | | static __inline__ __device__ signed char tex1DLayered(texture<signed char,
cudaTextureType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (signed char)v.x; | | return (signed char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned char tex3D(texture<unsigned char, 3,
cudaReadModeElementType> t, float x, float y, float z) | | static __inline__ __device__ unsigned char tex1DLayered(texture<unsigned ch
ar, cudaTextureType1DLayered, cudaReadModeElementType> t, float x, int laye
r) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (unsigned char)v.x; | | return (unsigned char)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char1 tex3D(texture<char1, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ char1 tex1DLayered(texture<char1, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_char1(v.x); | | return make_char1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar1 tex3D(texture<uchar1, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ uchar1 tex1DLayered(texture<uchar1, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_uchar1(v.x); | | return make_uchar1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char2 tex3D(texture<char2, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ char2 tex1DLayered(texture<char2, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_char2(v.x, v.y); | | return make_char2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar2 tex3D(texture<uchar2, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ uchar2 tex1DLayered(texture<uchar2, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_uchar2(v.x, v.y); | | return make_uchar2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex3D(texture<char4, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ char4 tex1DLayered(texture<char4, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_char4(v.x, v.y, v.z, v.w); | | return make_char4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex3D(texture<uchar4, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ uchar4 tex1DLayered(texture<uchar4, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_uchar4(v.x, v.y, v.z, v.w); | | return make_uchar4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ short tex3D(texture<short, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ short tex1DLayered(texture<short, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (short)v.x; | | return (short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned short tex3D(texture<unsigned short, 3
, cudaReadModeElementType> t, float x, float y, float z) | | static __inline__ __device__ unsigned short tex1DLayered(texture<unsigned s
hort, cudaTextureType1DLayered, cudaReadModeElementType> t, float x, int la
yer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (unsigned short)v.x; | | return (unsigned short)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short1 tex3D(texture<short1, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ short1 tex1DLayered(texture<short1, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_short1(v.x); | | return make_short1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort1 tex3D(texture<ushort1, 3, cudaReadMode
ElementType> t, float x, float y, float z) | | static __inline__ __device__ ushort1 tex1DLayered(texture<ushort1, cudaText
ureType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_ushort1(v.x); | | return make_ushort1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short2 tex3D(texture<short2, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ short2 tex1DLayered(texture<short2, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_short2(v.x, v.y); | | return make_short2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort2 tex3D(texture<ushort2, 3, cudaReadMode
ElementType> t, float x, float y, float z) | | static __inline__ __device__ ushort2 tex1DLayered(texture<ushort2, cudaText
ureType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_ushort2(v.x, v.y); | | return make_ushort2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex3D(texture<short4, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ short4 tex1DLayered(texture<short4, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_short4(v.x, v.y, v.z, v.w); | | return make_short4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex3D(texture<ushort4, 3, cudaReadMode
ElementType> t, float x, float y, float z) | | static __inline__ __device__ ushort4 tex1DLayered(texture<ushort4, cudaText
ureType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_ushort4(v.x, v.y, v.z, v.w); | | return make_ushort4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ int tex3D(texture<int, 3, cudaReadModeElementT
ype> t, float x, float y, float z) | | static __inline__ __device__ int tex1DLayered(texture<int, cudaTextureType1
DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (int)v.x; | | return (int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned int tex3D(texture<unsigned int, 3, cu
daReadModeElementType> t, float x, float y, float z) | | static __inline__ __device__ unsigned int tex1DLayered(texture<unsigned int
, cudaTextureType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (unsigned int)v.x; | | return (unsigned int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int1 tex3D(texture<int1, 3, cudaReadModeElemen
tType> t, float x, float y, float z) | | static __inline__ __device__ int1 tex1DLayered(texture<int1, cudaTextureTyp
e1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_int1(v.x); | | return make_int1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint1 tex3D(texture<uint1, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ uint1 tex1DLayered(texture<uint1, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_uint1(v.x); | | return make_uint1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int2 tex3D(texture<int2, 3, cudaReadModeElemen
tType> t, float x, float y, float z) | | static __inline__ __device__ int2 tex1DLayered(texture<int2, cudaTextureTyp
e1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_int2(v.x, v.y); | | return make_int2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint2 tex3D(texture<uint2, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ uint2 tex1DLayered(texture<uint2, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_uint2(v.x, v.y); | | return make_uint2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex3D(texture<int4, 3, cudaReadModeElemen
tType> t, float x, float y, float z) | | static __inline__ __device__ int4 tex1DLayered(texture<int4, cudaTextureTyp
e1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_int4(v.x, v.y, v.z, v.w); | | return make_int4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex3D(texture<uint4, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ uint4 tex1DLayered(texture<uint4, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_uint4(v.x, v.y, v.z, v.w); | | return make_uint4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #if !defined(__LP64__) | | #if !defined(__LP64__) | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ long tex3D(texture<long, 3, cudaReadModeElemen
tType> t, float x, float y, float z) | | static __inline__ __device__ long tex1DLayered(texture<long, cudaTextureTyp
e1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (long)v.x; | | return (long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ unsigned long tex3D(texture<unsigned long, 3,
cudaReadModeElementType> t, float x, float y, float z) | | static __inline__ __device__ unsigned long tex1DLayered(texture<unsigned lo
ng, cudaTextureType1DLayered, cudaReadModeElementType> t, float x, int laye
r) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return (unsigned long)v.x; | | return (unsigned long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long1 tex3D(texture<long1, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ long1 tex1DLayered(texture<long1, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_long1(v.x); | | return make_long1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong1 tex3D(texture<ulong1, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ ulong1 tex1DLayered(texture<ulong1, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_ulong1(v.x); | | return make_ulong1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long2 tex3D(texture<long2, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ long2 tex1DLayered(texture<long2, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_long2(v.x, v.y); | | return make_long2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong2 tex3D(texture<ulong2, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ ulong2 tex1DLayered(texture<ulong2, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_ulong2(v.x, v.y); | | return make_ulong2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ long4 tex3D(texture<long4, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ long4 tex1DLayered(texture<long4, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_long4(v.x, v.y, v.z, v.w); | | return make_long4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ulong4 tex3D(texture<ulong4, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ ulong4 tex1DLayered(texture<ulong4, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_ulong4(v.x, v.y, v.z, v.w); | | return make_ulong4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| #endif /* !__LP64__ */ | | #endif /* !__LP64__ */ | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex3D(texture<float, 3, cudaReadModeElem
entType> t, float x, float y, float z) | | static __inline__ __device__ float tex1DLayered(texture<float, cudaTextureT
ype1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | | float4 v = __ftexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return v.x; | | return v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex3D(texture<float1, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ float1 tex1DLayered(texture<float1, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | | float4 v = __ftexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_float1(v.x); | | return make_float1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex3D(texture<float2, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ float2 tex1DLayered(texture<float2, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | | float4 v = __ftexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_float2(v.x, v.y); | | return make_float2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex3D(texture<float4, 3, cudaReadModeEl
ementType> t, float x, float y, float z) | | static __inline__ __device__ float4 tex1DLayered(texture<float4, cudaTextur
eType1DLayered, cudaReadModeElementType> t, float x, int layer) | |
| { | | { | |
|
| float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | | float4 v = __ftexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| | | | |
| return make_float4(v.x, v.y, v.z, v.w); | | return make_float4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex3D(texture<char, 3, cudaReadModeNorma
lizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float tex1DLayered(texture<char, cudaTextureTy
pe1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| #if __SIGNED_CHARS__ | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| #else | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| #endif | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex3D(texture<signed char, 3, cudaReadMo
deNormalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float tex1DLayered(texture<signed char, cudaTe
xtureType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex3D(texture<unsigned char, 3, cudaRead
ModeNormalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float tex1DLayered(texture<unsigned char, cuda
TextureType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex3D(texture<char1, 3, cudaReadModeNor
malizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float1 tex1DLayered(texture<char1, cudaTexture
Type1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex3D(texture<uchar1, 3, cudaReadModeNo
rmalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float1 tex1DLayered(texture<uchar1, cudaTextur
eType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex3D(texture<char2, 3, cudaReadModeNor
malizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float2 tex1DLayered(texture<char2, cudaTexture
Type1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex3D(texture<uchar2, 3, cudaReadModeNo
rmalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float2 tex1DLayered(texture<uchar2, cudaTextur
eType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex3D(texture<char4, 3, cudaReadModeNor
malizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float4 tex1DLayered(texture<char4, cudaTexture
Type1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex3D(texture<uchar4, 3, cudaReadModeNo
rmalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float4 tex1DLayered(texture<uchar4, cudaTextur
eType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| static __inline__ __device__ float tex3D(texture<short, 3, cudaReadModeNorm
alizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float tex1DLayered(texture<short, cudaTextureT
ype1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float tex3D(texture<unsigned short, 3, cudaRea
dModeNormalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float tex1DLayered(texture<unsigned short, cud
aTextureType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return w.x; | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex3D(texture<short1, 3, cudaReadModeNo
rmalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float1 tex1DLayered(texture<short1, cudaTextur
eType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float1 tex3D(texture<ushort1, 3, cudaReadModeN
ormalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float1 tex1DLayered(texture<ushort1, cudaTextu
reType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float1(w.x); | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex3D(texture<short2, 3, cudaReadModeNo
rmalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float2 tex1DLayered(texture<short2, cudaTextur
eType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float2 tex3D(texture<ushort2, 3, cudaReadModeN
ormalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float2 tex1DLayered(texture<ushort2, cudaTextu
reType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float2(w.x, w.y); | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex3D(texture<short4, 3, cudaReadModeNo
rmalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float4 tex1DLayered(texture<short4, cudaTextur
eType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | | int4 v = __itexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex3D(texture<ushort4, 3, cudaReadModeN
ormalizedFloat> t, float x, float y, float z) | | static __inline__ __device__ float4 tex1DLayered(texture<ushort4, cudaTextu
reType1DLayered, cudaReadModeNormalizedFloat> t, float x, int layer) | |
| { | | { | |
|
| uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | | uint4 v = __utexfetchl(t, make_float4(x, 0, 0, 0), layer); | |
| float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as
_float(v.z), __int_as_float(v.w)); | |
| | | | |
| return make_float4(w.x, w.y, w.z, w.w); | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| #define __utexfetchi \ | | /************************************************************************** | |
| 000 incorect invocation of builtin __utexfetchi 000 | | ***** | |
| #define __itexfetchi \ | | * | |
| 000 incorect invocation of builtin __itexfetchi 000 | | * | |
| #define __ftexfetchi \ | | * 2D Layered Texture functions | |
| 000 incorect invocation of builtin __ftexfetchi 000 | | * | |
| #define __utexfetch \ | | * | |
| 000 incorect invocation of builtin __utexfetch 000 | | * | |
| #define __itexfetch \ | | *************************************************************************** | |
| 000 incorect invocation of builtin __itexfetch 000 | | ****/ | |
| #define __ftexfetch \ | | | |
| 000 incorect invocation of builtin __ftexfetch 000 | | | |
| | | | |
|
| #elif defined(__CUDABE__) | | static __inline__ __device__ char tex2DLayered(texture<char, cudaTextureTyp | |
| | | e2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | | |
|
| extern uint4 __utexfetchi1D(const void*, int4); | | return (char)v.x; | |
| extern int4 __itexfetchi1D(const void*, int4); | | } | |
| extern float4 __ftexfetchi1D(const void*, int4); | | | |
| extern uint4 __utexfetch1D(const void*, float4); | | | |
| extern int4 __itexfetch1D(const void*, float4); | | | |
| extern float4 __ftexfetch1D(const void*, float4); | | | |
| extern uint4 __utexfetch2D(const void*, float4); | | | |
| extern int4 __itexfetch2D(const void*, float4); | | | |
| extern float4 __ftexfetch2D(const void*, float4); | | | |
| extern uint4 __utexfetch3D(const void*, float4); | | | |
| extern int4 __itexfetch3D(const void*, float4); | | | |
| extern float4 __ftexfetch3D(const void*, float4); | | | |
| | | | |
|
| #define __utexfetchi(t, i) \ | | static __inline__ __device__ signed char tex2DLayered(texture<signed char, | |
| __utexfetchi1D(t, i) | | cudaTextureType2DLayered, cudaReadModeElementType> t, float x, float y, int | |
| #define __itexfetchi(t, i) \ | | layer) | |
| __itexfetchi1D(t, i) | | { | |
| #define __ftexfetchi(t, i) \ | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| __ftexfetchi1D(t, i) | | | |
| #define __utexfetch(t, i, d) \ | | | |
| __utexfetch##d##D(t, i) | | | |
| #define __itexfetch(t, i, d) \ | | | |
| __itexfetch##d##D(t, i) | | | |
| #define __ftexfetch(t, i, d) \ | | | |
| __ftexfetch##d##D(t, i) | | | |
| | | | |
|
| #endif /* __cplusplus && __CUDACC__ */ | | return (signed char)v.x; | |
| | | } | |
| | | | |
|
| #if defined(__cplusplus) && defined(__CUDACC__) | | static __inline__ __device__ unsigned char tex2DLayered(texture<unsigned ch | |
| | | ar, cudaTextureType2DLayered, cudaReadModeElementType> t, float x, float y, | |
| | | int layer) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
|
| #if !defined(__CUDA_ARCH__) || __CUDA_ARCH__ >= 200 | | return (unsigned char)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ char1 tex2DLayered(texture<char1, cudaTextureT | |
| | | ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_char1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uchar1 tex2DLayered(texture<uchar1, cudaTextur | |
| | | eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_uchar1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ char2 tex2DLayered(texture<char2, cudaTextureT | |
| | | ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_char2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uchar2 tex2DLayered(texture<uchar2, cudaTextur | |
| | | eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_uchar2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ char4 tex2DLayered(texture<char4, cudaTextureT | |
| | | ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_char4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uchar4 tex2DLayered(texture<uchar4, cudaTextur | |
| | | eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_uchar4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| /**************************************************************************
***** | | /**************************************************************************
***** | |
| *
* | | *
* | |
| *
* | | *
* | |
| *
* | | *
* | |
| ***************************************************************************
****/ | | ***************************************************************************
****/ | |
| | | | |
|
| /*DEVICE_BUILTIN*/ | | static __inline__ __device__ short tex2DLayered(texture<short, cudaTextureT | |
| template<int comp, class T> extern __device__ int4 __itex2Dgather(texture | | ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| <T, 2, cudaReadModeElementType> t, float2 i, int c = comp); | | { | |
| /*DEVICE_BUILTIN*/ | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| template<int comp, class T> extern __device__ uint4 __utex2Dgather(texture | | | |
| <T, 2, cudaReadModeElementType> t, float2 i, int c = comp); | | | |
| /*DEVICE_BUILTIN*/ | | | |
| template<int comp, class T> extern __device__ float4 __ftex2Dgather(texture | | | |
| <T, 2, cudaReadModeElementType> t, float2 i, int c = comp); | | | |
| | | | |
|
| #define __tex2DgatherUtil(T, f, r, c) \ | | return (short)v.x; | |
| { T v = f<c>(t, make_float2(x, y)); return r; } | | } | |
| | | | |
|
| #define __tex2DgatherUtil1(T, f, r) \ | | static __inline__ __device__ unsigned short tex2DLayered(texture<unsigned s | |
| __tex2DgatherUtil(T, f, r, 0) | | hort, cudaTextureType2DLayered, cudaReadModeElementType> t, float x, float | |
| | | y, int layer) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
|
| #define __tex2DgatherUtil2(T, f, r) \ | | return (unsigned short)v.x; | |
| if (comp == 1) __tex2DgatherUtil(T, f, r, 1) \ | | } | |
| else __tex2DgatherUtil1(T, f, r) | | | |
| | | | |
|
| #define __tex2DgatherUtil3(T, f, r) \ | | static __inline__ __device__ short1 tex2DLayered(texture<short1, cudaTextur | |
| if (comp == 2) __tex2DgatherUtil(T, f, r, 2) \ | | eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| else __tex2DgatherUtil2(T, f, r) | | { | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
|
| #define __tex2DgatherUtil4(T, f, r) \ | | return make_short1(v.x); | |
| if (comp == 3) __tex2DgatherUtil(T, f, r, 3) \ | | } | |
| else __tex2DgatherUtil3(T, f, r) | | | |
| | | | |
|
| static __inline__ __device__ char4 tex2Dgather(texture<char, 2, cudaReadMod
eElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ ushort1 tex2DLayered(texture<ushort1, cudaText
ureType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, make_char4(v.x, v.y, v.z, v.w)); | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_ushort1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex2Dgather(texture<signed char, 2, cuda
ReadModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ short2 tex2DLayered(texture<short2, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, make_char4(v.x, v.y, v.z, v.w)); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_short2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex2Dgather(texture<unsigned char, 2, c
udaReadModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ ushort2 tex2DLayered(texture<ushort2, cudaText
ureType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(uint4, __utex2Dgather, make_uchar4(v.x, v.y, v.z, v.w) | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ); | | | |
| | | return make_ushort2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex2Dgather(texture<char1, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ short4 tex2DLayered(texture<short4, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, make_char4(v.x, v.y, v.z, v.w)); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_short4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex2Dgather(texture<uchar1, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ ushort4 tex2DLayered(texture<ushort4, cudaText
ureType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(uint4, __utex2Dgather, make_uchar4(v.x, v.y, v.z, v.w) | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ); | | | |
| | | return make_ushort4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex2Dgather(texture<char2, 2, cudaReadMo | | /************************************************************************** | |
| deElementType> t, float x, float y, int comp = 0) | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ int tex2DLayered(texture<int, cudaTextureType2 | |
| | | DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil2(int4, __itex2Dgather, make_char4(v.x, v.y, v.z, v.w)); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return (int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex2Dgather(texture<uchar2, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ unsigned int tex2DLayered(texture<unsigned int
, cudaTextureType2DLayered, cudaReadModeElementType> t, float x, float y, i
nt layer) | |
| { | | { | |
|
| __tex2DgatherUtil2(uint4, __utex2Dgather, make_uchar4(v.x, v.y, v.z, v.w) | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ); | | | |
| | | return (unsigned int)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex2Dgather(texture<char3, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ int1 tex2DLayered(texture<int1, cudaTextureTyp
e2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(int4, __itex2Dgather, make_char4(v.x, v.y, v.z, v.w)); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_int1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex2Dgather(texture<uchar3, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ uint1 tex2DLayered(texture<uint1, cudaTextureT
ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(uint4, __utex2Dgather, make_uchar4(v.x, v.y, v.z, v.w) | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ); | | | |
| | | return make_uint1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ char4 tex2Dgather(texture<char4, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ int2 tex2DLayered(texture<int2, cudaTextureTyp
e2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil4(int4, __itex2Dgather, make_char4(v.x, v.y, v.z, v.w)); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_int2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uchar4 tex2Dgather(texture<uchar4, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ uint2 tex2DLayered(texture<uint2, cudaTextureT
ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil4(uint4, __utex2Dgather, make_uchar4(v.x, v.y, v.z, v.w) | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ); | | | |
| | | return make_uint2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex2Dgather(texture<signed short, 2, cu
daReadModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ int4 tex2DLayered(texture<int4, cudaTextureTyp
e2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, make_short4(v.x, v.y, v.z, v.w)) | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ; | | | |
| | | return make_int4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex2Dgather(texture<unsigned short, 2,
cudaReadModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ uint4 tex2DLayered(texture<uint4, cudaTextureT
ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(uint4, __utex2Dgather, make_ushort4(v.x, v.y, v.z, v.w | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| )); | | | |
| | | return make_uint4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex2Dgather(texture<short1, 2, cudaRead | | #if !defined(__LP64__) | |
| ModeElementType> t, float x, float y, int comp = 0) | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ long tex2DLayered(texture<long, cudaTextureTyp | |
| | | e2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, make_short4(v.x, v.y, v.z, v.w)) | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ; | | | |
| | | return (long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex2Dgather(texture<ushort1, 2, cudaRe
adModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ unsigned long tex2DLayered(texture<unsigned lo
ng, cudaTextureType2DLayered, cudaReadModeElementType> t, float x, float y,
int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(uint4, __utex2Dgather, make_ushort4(v.x, v.y, v.z, v.w | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| )); | | | |
| | | return (unsigned long)v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex2Dgather(texture<short2, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ long1 tex2DLayered(texture<long1, cudaTextureT
ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil2(int4, __itex2Dgather, make_short4(v.x, v.y, v.z, v.w)) | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ; | | | |
| | | return make_long1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex2Dgather(texture<ushort2, 2, cudaRe
adModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ ulong1 tex2DLayered(texture<ulong1, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil2(uint4, __utex2Dgather, make_ushort4(v.x, v.y, v.z, v.w | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| )); | | | |
| | | return make_ulong1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex2Dgather(texture<short3, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ long2 tex2DLayered(texture<long2, cudaTextureT
ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(int4, __itex2Dgather, make_short4(v.x, v.y, v.z, v.w)) | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ; | | | |
| | | return make_long2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex2Dgather(texture<ushort3, 2, cudaRe
adModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ ulong2 tex2DLayered(texture<ulong2, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(uint4, __utex2Dgather, make_ushort4(v.x, v.y, v.z, v.w | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| )); | | | |
| | | return make_ulong2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ short4 tex2Dgather(texture<short4, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ long4 tex2DLayered(texture<long4, cudaTextureT
ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil4(int4, __itex2Dgather, make_short4(v.x, v.y, v.z, v.w)) | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| ; | | | |
| | | return make_long4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ ushort4 tex2Dgather(texture<ushort4, 2, cudaRe
adModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ ulong4 tex2DLayered(texture<ulong4, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil4(uint4, __utex2Dgather, make_ushort4(v.x, v.y, v.z, v.w | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| )); | | | |
| | | return make_ulong4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex2Dgather(texture<signed int, 2, cudaRe | | #endif /* !__LP64__ */ | |
| adModeElementType> t, float x, float y, int comp = 0) | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ float tex2DLayered(texture<float, cudaTextureT | |
| | | ype2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, v); | | float4 v = __ftexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return v.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex2Dgather(texture<unsigned int, 2, cud
aReadModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float1 tex2DLayered(texture<float1, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(uint4, __utex2Dgather, v); | | float4 v = __ftexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_float1(v.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex2Dgather(texture<int1, 2, cudaReadMode
ElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float2 tex2DLayered(texture<float2, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(int4, __itex2Dgather, v); | | float4 v = __ftexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_float2(v.x, v.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex2Dgather(texture<uint1, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float4 tex2DLayered(texture<float4, cudaTextur
eType2DLayered, cudaReadModeElementType> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(uint4, __utex2Dgather, v); | | float4 v = __ftexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | | |
| | | return make_float4(v.x, v.y, v.z, v.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex2Dgather(texture<int2, 2, cudaReadMode | | /************************************************************************** | |
| ElementType> t, float x, float y, int comp = 0) | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ float tex2DLayered(texture<char, cudaTextureTy | |
| | | pe2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil2(int4, __itex2Dgather, v); | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex2Dgather(texture<uint2, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float tex2DLayered(texture<signed char, cudaTe
xtureType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int l
ayer) | |
| { | | { | |
|
| __tex2DgatherUtil2(uint4, __utex2Dgather, v); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex2Dgather(texture<int3, 2, cudaReadMode
ElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float tex2DLayered(texture<unsigned char, cuda
TextureType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int
layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(int4, __itex2Dgather, v); | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex2Dgather(texture<uint3, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float1 tex2DLayered(texture<char1, cudaTexture
Type2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(uint4, __utex2Dgather, v); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ int4 tex2Dgather(texture<int4, 2, cudaReadMode
ElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float1 tex2DLayered(texture<uchar1, cudaTextur
eType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer
) | |
| { | | { | |
|
| __tex2DgatherUtil4(int4, __itex2Dgather, v); | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ uint4 tex2Dgather(texture<uint4, 2, cudaReadMo
deElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float2 tex2DLayered(texture<char2, cudaTexture
Type2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil4(uint4, __utex2Dgather, v); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2Dgather(texture<float, 2, cudaReadM
odeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float2 tex2DLayered(texture<uchar2, cudaTextur
eType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer
) | |
| { | | { | |
|
| __tex2DgatherUtil1(float4, __ftex2Dgather, v); | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2Dgather(texture<float1, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float4 tex2DLayered(texture<char4, cudaTexture
Type2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil1(float4, __ftex2Dgather, v); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2Dgather(texture<float2, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float4 tex2DLayered(texture<uchar4, cudaTextur
eType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer
) | |
| { | | { | |
|
| __tex2DgatherUtil2(float4, __ftex2Dgather, v); | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2Dgather(texture<float3, 2, cudaRead | | /************************************************************************** | |
| ModeElementType> t, float x, float y, int comp = 0) | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ float tex2DLayered(texture<short, cudaTextureT | |
| | | ype2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer) | |
| { | | { | |
|
| __tex2DgatherUtil3(float4, __ftex2Dgather, v); | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| } | | } | |
| | | | |
|
| static __inline__ __device__ float4 tex2Dgather(texture<float4, 2, cudaRead
ModeElementType> t, float x, float y, int comp = 0) | | static __inline__ __device__ float tex2DLayered(texture<unsigned short, cud
aTextureType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, in
t layer) | |
| { | | { | |
|
| __tex2DgatherUtil4(float4, __ftex2Dgather, v); | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| } | | } | |
| | | | |
|
| #undef __tex2DgatherUtil | | static __inline__ __device__ float1 tex2DLayered(texture<short1, cudaTextur | |
| #undef __tex2DgatherUtil1 | | eType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer | |
| #undef __tex2DgatherUtil2 | | ) | |
| #undef __tex2DgatherUtil3 | | { | |
| #undef __tex2DgatherUtil4 | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
|
| #define __utex2Dgather \ | | return make_float1(w.x); | |
| 000 incorect invocation of builtin __utex2Dgather 000 | | } | |
| #define __itex2Dgather \ | | | |
| 000 incorect invocation of builtin __itex2Dgather 000 | | | |
| #define __ftex2Dgather \ | | | |
| 000 incorect invocation of builtin __ftex2Dgather 000 | | | |
| | | | |
|
| #endif /* !__CUDA_ARCH__ || __CUDA_ARCH__ >= 200 */ | | static __inline__ __device__ float1 tex2DLayered(texture<ushort1, cudaTextu | |
| | | reType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int laye | |
| | | r) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex2DLayered(texture<short2, cudaTextur | |
| | | eType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer | |
| | | ) | |
| | | { | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex2DLayered(texture<ushort2, cudaTextu | |
| | | reType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int laye | |
| | | r) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex2DLayered(texture<short4, cudaTextur | |
| | | eType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int layer | |
| | | ) | |
| | | { | |
| | | int4 v = __itexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex2DLayered(texture<ushort4, cudaTextu | |
| | | reType2DLayered, cudaReadModeNormalizedFloat> t, float x, float y, int laye | |
| | | r) | |
| | | { | |
| | | uint4 v = __utexfetchl(t, make_float4(x, y, 0, 0), layer); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| | | } | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * 3D Texture functions | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ char tex3D(texture<char, cudaTextureType3D, cu | |
| | | daReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | | |
| | | return (char)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ signed char tex3D(texture<signed char, cudaTex | |
| | | tureType3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (signed char)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ unsigned char tex3D(texture<unsigned char, cud | |
| | | aTextureType3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (unsigned char)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ char1 tex3D(texture<char1, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_char1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uchar1 tex3D(texture<uchar1, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_uchar1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ char2 tex3D(texture<char2, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_char2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uchar2 tex3D(texture<uchar2, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_uchar2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ char4 tex3D(texture<char4, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_char4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uchar4 tex3D(texture<uchar4, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_uchar4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ short tex3D(texture<short, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (short)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ unsigned short tex3D(texture<unsigned short, c | |
| | | udaTextureType3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (unsigned short)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ short1 tex3D(texture<short1, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_short1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ ushort1 tex3D(texture<ushort1, cudaTextureType | |
| | | 3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_ushort1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ short2 tex3D(texture<short2, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_short2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ ushort2 tex3D(texture<ushort2, cudaTextureType | |
| | | 3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_ushort2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ short4 tex3D(texture<short4, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_short4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ ushort4 tex3D(texture<ushort4, cudaTextureType | |
| | | 3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_ushort4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ int tex3D(texture<int, cudaTextureType3D, cuda | |
| | | ReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (int)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ unsigned int tex3D(texture<unsigned int, cudaT | |
| | | extureType3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (unsigned int)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ int1 tex3D(texture<int1, cudaTextureType3D, cu | |
| | | daReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_int1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uint1 tex3D(texture<uint1, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_uint1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ int2 tex3D(texture<int2, cudaTextureType3D, cu | |
| | | daReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_int2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uint2 tex3D(texture<uint2, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_uint2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ int4 tex3D(texture<int4, cudaTextureType3D, cu | |
| | | daReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_int4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ uint4 tex3D(texture<uint4, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_uint4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | #if !defined(__LP64__) | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ long tex3D(texture<long, cudaTextureType3D, cu | |
| | | daReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (long)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ unsigned long tex3D(texture<unsigned long, cud | |
| | | aTextureType3D, cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return (unsigned long)v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ long1 tex3D(texture<long1, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_long1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ ulong1 tex3D(texture<ulong1, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_ulong1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ long2 tex3D(texture<long2, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_long2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ ulong2 tex3D(texture<ulong2, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_ulong2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ long4 tex3D(texture<long4, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_long4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ ulong4 tex3D(texture<ulong4, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_ulong4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | #endif /* !__LP64__ */ | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ float tex3D(texture<float, cudaTextureType3D, | |
| | | cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return v.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float1 tex3D(texture<float1, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_float1(v.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex3D(texture<float2, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_float2(v.x, v.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex3D(texture<float4, cudaTextureType3D | |
| | | , cudaReadModeElementType> t, float x, float y, float z) | |
| | | { | |
| | | float4 v = __ftexfetch(t, make_float4(x, y, z, 0)); | |
| | | | |
| | | return make_float4(v.x, v.y, v.z, v.w); | |
| | | } | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ float tex3D(texture<char, cudaTextureType3D, c | |
| | | udaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | #if defined(_CHAR_UNSIGNED) || defined(__CHAR_UNSIGNED__) | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | #else /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | #endif /* _CHAR_UNSIGNED || __CHAR_UNSIGNED__ */ | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float tex3D(texture<signed char, cudaTextureTy | |
| | | pe3D, cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float tex3D(texture<unsigned char, cudaTexture | |
| | | Type3D, cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float1 tex3D(texture<char1, cudaTextureType3D, | |
| | | cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float1 tex3D(texture<uchar1, cudaTextureType3D | |
| | | , cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex3D(texture<char2, cudaTextureType3D, | |
| | | cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex3D(texture<uchar2, cudaTextureType3D | |
| | | , cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex3D(texture<char4, cudaTextureType3D, | |
| | | cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex3D(texture<uchar4, cudaTextureType3D | |
| | | , cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| | | } | |
| | | | |
| | | /************************************************************************** | |
| | | ***** | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | * | |
| | | *************************************************************************** | |
| | | ****/ | |
| | | | |
| | | static __inline__ __device__ float tex3D(texture<short, cudaTextureType3D, | |
| | | cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float tex3D(texture<unsigned short, cudaTextur | |
| | | eType3D, cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return w.x; | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float1 tex3D(texture<short1, cudaTextureType3D | |
| | | , cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float1 tex3D(texture<ushort1, cudaTextureType3 | |
| | | D, cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float1(w.x); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex3D(texture<short2, cudaTextureType3D | |
| | | , cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float2 tex3D(texture<ushort2, cudaTextureType3 | |
| | | D, cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float2(w.x, w.y); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex3D(texture<short4, cudaTextureType3D | |
| | | , cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | int4 v = __itexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| | | } | |
| | | | |
| | | static __inline__ __device__ float4 tex3D(texture<ushort4, cudaTextureType3 | |
| | | D, cudaReadModeNormalizedFloat> t, float x, float y, float z) | |
| | | { | |
| | | uint4 v = __utexfetch(t, make_float4(x, y, z, 0)); | |
| | | float4 w = make_float4(__int_as_float(v.x), __int_as_float(v.y), __int_as | |
| | | _float(v.z), __int_as_float(v.w)); | |
| | | | |
| | | return make_float4(w.x, w.y, w.z, w.w); | |
| | | } | |
| | | | |
| #elif defined(__CUDABE__) | | #elif defined(__CUDABE__) | |
| | | | |
|
| extern uint4 __utex2Dgather0(const void*, float2); | | extern uint4 __utexfetchi1D(const void*, int4); | |
| extern uint4 __utex2Dgather1(const void*, float2); | | extern int4 __itexfetchi1D(const void*, int4); | |
| extern uint4 __utex2Dgather2(const void*, float2); | | extern float4 __ftexfetchi1D(const void*, int4); | |
| extern uint4 __utex2Dgather3(const void*, float2); | | extern uint4 __utexfetch1D(const void*, float4); | |
| extern int4 __itex2Dgather0(const void*, float2); | | extern int4 __itexfetch1D(const void*, float4); | |
| extern int4 __itex2Dgather1(const void*, float2); | | extern float4 __ftexfetch1D(const void*, float4); | |
| extern int4 __itex2Dgather2(const void*, float2); | | extern uint4 __utexfetch2D(const void*, float4); | |
| extern int4 __itex2Dgather3(const void*, float2); | | extern int4 __itexfetch2D(const void*, float4); | |
| extern float4 __ftex2Dgather0(const void*, float2); | | extern float4 __ftexfetch2D(const void*, float4); | |
| extern float4 __ftex2Dgather1(const void*, float2); | | extern uint4 __utexfetch3D(const void*, float4); | |
| extern float4 __ftex2Dgather2(const void*, float2); | | extern int4 __itexfetch3D(const void*, float4); | |
| extern float4 __ftex2Dgather3(const void*, float2); | | extern float4 __ftexfetch3D(const void*, float4); | |
| | | extern uint4 __utexfetchl1D(const void*, float4, int); | |
| | | extern int4 __itexfetchl1D(const void*, float4, int); | |
| | | extern float4 __ftexfetchl1D(const void*, float4, int); | |
| | | extern uint4 __utexfetchl2D(const void*, float4, int); | |
| | | extern int4 __itexfetchl2D(const void*, float4, int); | |
| | | extern float4 __ftexfetchl2D(const void*, float4, int); | |
| | | | |
|
| #define __utex2Dgather(t, i, c) \ | | #define __utexfetchi(t, i) \ | |
| __utex2Dgather##c(t, i) | | __utexfetchi1D(t, i) | |
| #define __itex2Dgather(t, i, c) \ | | #define __itexfetchi(t, i) \ | |
| __itex2Dgather##c(t, i) | | __itexfetchi1D(t, i) | |
| #define __ftex2Dgather(t, i, c) \ | | #define __ftexfetchi(t, i) \ | |
| __ftex2Dgather##c(t, i) | | __ftexfetchi1D(t, i) | |
| | | #define __utexfetch(t, i, d) \ | |
| | | __utexfetch##d##D(t, i) | |
| | | #define __itexfetch(t, i, d) \ | |
| | | __itexfetch##d##D(t, i) | |
| | | #define __ftexfetch(t, i, d) \ | |
| | | __ftexfetch##d##D(t, i) | |
| | | #define __utexfetchl(t, i, l, d) \ | |
| | | __utexfetchl##d##D(t, i, l) | |
| | | #define __itexfetchl(t, i, l, d) \ | |
| | | __itexfetchl##d##D(t, i, l) | |
| | | #define __ftexfetchl(t, i, l, d) \ | |
| | | __ftexfetchl##d##D(t, i, l) | |
| | | | |
| #endif /* __cplusplus && __CUDACC__ */ | | #endif /* __cplusplus && __CUDACC__ */ | |
| | | | |
| #endif /* !__TEXTURE_FETCH_FUNCTIONS_H__ */ | | #endif /* !__TEXTURE_FETCH_FUNCTIONS_H__ */ | |
| | | | |
End of changes. 385 change blocks. |
| 523 lines changed or deleted | | 1354 lines changed or added | |
|