Java class com.sun.jna.ptr.PointerByReference: example source code

Project: BIMplatform    File: IfcEngine.java
/**
 * Returns a data field in the actual aggregate element.
 * 
 * @param aggregate
 *            Existing aggregation
 * @param elementIndex
 *            Position in the existing aggregation, first position is 0
 * @param valueType
 *            Type of output value
 * @return Value of the specific element in the aggregation
 */
public Object engiGetAggrElement(Pointer aggregate, int elementIndex, SdaiTypes valueType) {
    Object returnValue = null;
    switch (valueType) {
    case INTEGER:
        IntByReference intRef = new IntByReference();
        engine.engiGetAggrElement(aggregate, elementIndex, valueType.ordinal(), intRef);
        returnValue = Integer.valueOf(intRef.getValue());
        break;
    case REAL:
        DoubleByReference dblRef = new DoubleByReference();
        engine.engiGetAggrElement(aggregate, elementIndex, valueType.ordinal(), dblRef);
        returnValue = Double.valueOf(dblRef.getValue());
        break;
    case STRING:
        PointerByReference strRef = new PointerByReference();
        engine.engiGetAggrElement(aggregate, elementIndex, valueType.ordinal(), strRef);
        Pointer strPtr = strRef.getValue();
        if (strPtr != null)
            returnValue = strPtr.getString(0);
        break;
    default:
        PointerByReference ptrRef = new PointerByReference();
        engine.engiGetAggrElement(aggregate, elementIndex, valueType.ordinal(), ptrRef);
        returnValue = ptrRef.getValue();
        break;
    }
    return returnValue;
}
Project: BIMplatform    File: IfcEngine.java
/**
 * Implementation postponed till version 1.10
 * 
 * @param iterator
 *            Existing iterator
 * @param valueType
 *            Type of output value
 * @return the value at the current position of the iterator
 */
public Object sdaiGetAggrByIterator(Pointer iterator, SdaiTypes valueType) {
    Object returnValue = null;

    switch (valueType) {
    case REAL:
        DoubleByReference dVal = new DoubleByReference();
        engine.sdaiGetAggrByIterator(iterator, valueType.ordinal(), dVal);
        returnValue = Double.valueOf(dVal.getValue());
        break;
    case INTEGER:
    case BOOLEAN:
    case LOGICAL:
        IntByReference iVal = new IntByReference();
        engine.sdaiGetAggrByIterator(iterator, valueType.ordinal(), iVal);
        returnValue = Integer.valueOf(iVal.getValue());
        break;
    case STRING:
        PointerByReference sVal = new PointerByReference();
        engine.sdaiGetAggrByIterator(iterator, valueType.ordinal(), sVal);
        Pointer sPtr = sVal.getValue();
        if (sPtr != null)
            returnValue = sPtr.getString(0);
        break;
    default:
        PointerByReference ptr = new PointerByReference();
        engine.sdaiGetAggrByIterator(iterator, valueType.ordinal(), ptr);
        returnValue = ptr.getValue();
        break;
    }
    return returnValue;
}
Project: Elasticsearch    File: Seccomp.java
/** try to install our custom rule profile into sandbox_init() to block execution */
private static void macImpl(Path tmpFile) throws IOException {
    // first be defensive: we can give nice errors this way, at the very least.
    boolean supported = Constants.MAC_OS_X;
    if (supported == false) {
        throw new IllegalStateException("bug: should not be trying to initialize seatbelt for an unsupported OS");
    }

    // we couldn't link methods, could be some really ancient OS X (< Leopard) or some bug
    if (libc_mac == null) {
        throw new UnsupportedOperationException("seatbelt unavailable: could not link methods. requires Leopard or above.");
    }

    // write rules to a temporary file, which will be passed to sandbox_init()
    Path rules = Files.createTempFile(tmpFile, "es", "sb");
    Files.write(rules, Collections.singleton(SANDBOX_RULES), StandardCharsets.UTF_8);

    boolean success = false;
    try {
        PointerByReference errorRef = new PointerByReference();
        int ret = libc_mac.sandbox_init(rules.toAbsolutePath().toString(), SANDBOX_NAMED, errorRef);
        // if sandbox_init() fails, add the message from the OS (e.g. syntax error) and free the buffer
        if (ret != 0) {
            Pointer errorBuf = errorRef.getValue();
            RuntimeException e = new UnsupportedOperationException("sandbox_init(): " + errorBuf.getString(0));
            libc_mac.sandbox_free_error(errorBuf);
            throw e;
        }
        logger.debug("OS X seatbelt initialization successful");
        success = true;
    } finally {
        if (success) {
            Files.delete(rules);
        } else {
            IOUtils.deleteFilesIgnoringExceptions(rules);
        }
    }
}
Project: Fatigue-Detection    File: STMobileApiBridge.java
int st_mobile_face_attribute_detect(
      Pointer handle,
      byte[] image,
      int pixel_format,
      int image_width,
      int image_height,
      int image_stride,
      PointerByReference p_face,                 // TODO: confirm whether this is used the same way as st_mobile_face_action_t **p_face_action_array
      IntByReference p_attributes
);
Project: Laplacian    File: FFmpegDecodeBridge.java
public void close() {
    if (formatContext != null) {
        PointerByReference ptr = new PointerByReference(formatContext.getPointer());
        Avformat57Library.INSTANCE.avformat_close_input(ptr);
    }
    formatContext = null;
    buffer = null;
}
Project: chromaprint-jna    File: Fingerprint.java
public String getString() {
    final PointerByReference ref = new PointerByReference();
    ChromaprintLibrary.INSTANCE.chromaprint_get_fingerprint(context, ref);
    String fp = ref.getValue().getString(0);
    ChromaprintLibrary.INSTANCE.chromaprint_dealloc(ref.getValue()); // free the string allocated by the native library, not the JNA-side pointer holder
    return fp;
}
Project: coordination_oru    File: ReedsSheppCarPlanner.java
public boolean plan() {
    ArrayList<PoseSteering> finalPath = new ArrayList<PoseSteering>();  
    for (int i = 0; i < this.goal.length; i++) {
        Pose start_ = null;
        Pose goal_ = this.goal[i];
        if (i == 0) start_ = this.start;
        else start_ = this.goal[i-1];
        path = new PointerByReference();
        pathLength = new IntByReference();
        if (collisionCircleCenters == null) {
            if (!INSTANCE.plan(mapFilename, mapResolution, robotRadius, start_.getX(), start_.getY(), start_.getTheta(), goal_.getX(), goal_.getY(), goal_.getTheta(), path, pathLength, distanceBetweenPathPoints, turningRadius)) return false;
        }
        else {
            double[] xCoords = new double[collisionCircleCenters.length];
            double[] yCoords = new double[collisionCircleCenters.length];
            int numCoords = collisionCircleCenters.length;
            for (int j = 0; j < collisionCircleCenters.length; j++) {
                xCoords[j] = collisionCircleCenters[j].x;
                yCoords[j] = collisionCircleCenters[j].y;
            }
            System.out.println("Path planning with " + collisionCircleCenters.length + " circle positions");
            if (this.mapFilename != null) {
                if (!INSTANCE.plan_multiple_circles(mapFilename, mapResolution, robotRadius, xCoords, yCoords, numCoords, start_.getX(), start_.getY(), start_.getTheta(), goal_.getX(), goal_.getY(), goal_.getTheta(), path, pathLength, distanceBetweenPathPoints, turningRadius)) return false;                 
            }
            else {
                if (!INSTANCE.plan_multiple_circles_nomap(xCoords, yCoords, numCoords, start_.getX(), start_.getY(), start_.getTheta(), goal_.getX(), goal_.getY(), goal_.getTheta(), path, pathLength, distanceBetweenPathPoints, turningRadius)) return false;                    
            }
        }
        final Pointer pathVals = path.getValue();
        final PathPose valsRef = new PathPose(pathVals);
        valsRef.read();
        int numVals = pathLength.getValue();
        PathPose[] pathPoses = (PathPose[])valsRef.toArray(numVals);
        if (i == 0) finalPath.add(new PoseSteering(pathPoses[0].x, pathPoses[0].y, pathPoses[0].theta, 0.0));
        for (int j = 1; j < pathPoses.length; j++) finalPath.add(new PoseSteering(pathPoses[j].x, pathPoses[j].y, pathPoses[j].theta, 0.0));
        INSTANCE.cleanupPath(pathVals);
    }
    this.pathPS = finalPath.toArray(new PoseSteering[finalPath.size()]);
    return true;
}
Project: Sikulix2tesseract    File: Tesseract1.java
/**
 * Initializes Tesseract engine.
 */
protected void init() {
    handle = TessBaseAPICreate();
    StringArray sarray = new StringArray(configList.toArray(new String[0]));
    PointerByReference configs = new PointerByReference();
    configs.setPointer(sarray);
    TessBaseAPIInit1(handle, datapath, language, ocrEngineMode, configs, configList.size());
    if (psm > -1) {
        TessBaseAPISetPageSegMode(handle, psm);
    }
}
Project: chromaprint-jna    File: ChromaprintLibraryTest.java
@Test
public void chromaprint_get_fingerprint() throws Exception {
    ChromaprintLibrary.INSTANCE.chromaprint_start(context, 44100, 1);
    short[] data = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
    ChromaprintLibrary.INSTANCE.chromaprint_feed(context, data, data.length);
    ChromaprintLibrary.INSTANCE.chromaprint_finish(context);

    final PointerByReference ref = new PointerByReference();
    ChromaprintLibrary.INSTANCE.chromaprint_get_fingerprint(context, ref);
    System.out.println(ref.getValue().getString(0));
}
Project: Laplacian    File: AVCodecDescriptor.java
/**
 * @param id @see AVCodecID<br>
 * C type : AVCodecID<br>
 * @param type @see org.ffmpeg.avutil55.Avutil55Library#AVMediaType<br>
 * C type : AVMediaType<br>
 * @param name C type : const char*<br>
 * @param long_name C type : const char*<br>
 * @param mime_types C type : const char**<br>
 * @param profiles C type : AVProfile*
 */
public AVCodecDescriptor(int id, int type, Pointer name, Pointer long_name, int props, PointerByReference mime_types, org.ffmpeg.avcodec57.AVProfile.ByReference profiles) {
    super();
    this.id = id;
    this.type = type;
    this.name = name;
    this.long_name = long_name;
    this.props = props;
    this.mime_types = mime_types;
    this.profiles = profiles;
}
Project: incubator-netbeans    File: WindowsNotifier.java
boolean GetQueuedCompletionStatus(HANDLE CompletionPort,
IntByReference lpNumberOfBytes, ByReference lpCompletionKey,
PointerByReference lpOverlapped, int dwMilliseconds);
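A minimal polling sketch for the mapping above. It assumes kernel32 is the JNA interface instance that declares GetQueuedCompletionStatus, port is a completion-port HANDLE obtained elsewhere (e.g. from CreateIoCompletionPort), and BaseTSD.ULONG_PTRByReference from jna-platform is used for the pointer-sized completion key; the point is how the OVERLAPPED pointer comes back through a PointerByReference.

// Assumed in scope: kernel32 (the JNA interface above) and port (a completion-port HANDLE).
IntByReference numberOfBytes = new IntByReference();
BaseTSD.ULONG_PTRByReference completionKey = new BaseTSD.ULONG_PTRByReference();
PointerByReference overlapped = new PointerByReference();

// Block for up to one second waiting for a completed I/O packet.
boolean ok = kernel32.GetQueuedCompletionStatus(port, numberOfBytes, completionKey, overlapped, 1000);
if (ok) {
    Pointer lpOverlapped = overlapped.getValue();      // OVERLAPPED* of the finished operation
    int bytesTransferred = numberOfBytes.getValue();
    // dispatch on completionKey.getValue() / lpOverlapped here
}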
Project: hyperscan-java    File: Database.java
/**
 * compile an expression into a database to use for scanning
 * @param expression Expression to compile
 * @return Compiled database
 * @throws Throwable CompileErrorException on errors concerning the pattern, otherwise different Throwable's
 */
public static Database compile(Expression expression) throws Throwable {
    PointerByReference database = new PointerByReference();
    PointerByReference error = new PointerByReference();


    int hsError = HyperscanLibrary.INSTANCE.hs_compile(expression.getExpression(),
            Util.bitEnumSetToInt(expression.getFlags()), HS_MODE_BLOCK, Pointer.NULL, database, error);

    ArrayList<Expression> expressions = new ArrayList<Expression>(1);
    expressions.add(expression);

    handleErrors(hsError, error.getValue(), expressions);

    return new Database(database.getValue(), expressions);
}
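A hedged usage example for compile(). The single-argument Expression constructor is an assumption about the hyperscan-java API; only Database.compile itself is shown above.

// Compile one pattern into a block-mode database.
// new Expression(String) is assumed; scanning the database is not shown here.
Expression expression = new Expression("[0-9]{4}-[0-9]{2}-[0-9]{2}");
Database database = Database.compile(expression);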
Project: hyperscan-java    File: HyperscanLibrary.java
int hs_compile_multi(String[] expressions, int[] flags, int[] ids, int elements, int mode, Pointer platform,
PointerByReference database, PointerByReference error);
Project: Fatigue-Detection    File: STMobileApiBridge.java
int st_mobile_face_attribute_create(
    String model_path,
    PointerByReference handle
);
Project: Laplacian    File: FFmpegDecodeBridge.java
@Nullable
public byte[] tryRead(byte[] buf) throws FFmpegException {
    if (formatContext == null) throw new IllegalStateException("Decoder is already closed");

    int retval;
    while (true) {
        retval = Avformat57Library.INSTANCE.av_read_frame(formatContext, packet);
        if (retval < 0) return null;
        packet.read();
        if (packet.stream_index != audioStreamIndex) {
            Avcodec57Library.INSTANCE.av_packet_unref(packet);
        } else break;
    }

    retval = Avcodec57Library.INSTANCE.avcodec_send_packet(codecContext, packet.getPointer());
    if (retval < 0 && retval != -11 /* AVERROR(EAGAIN) */ && retval != AVERROR_EOF) {
        throw new FFmpegException("Decode packet failed! avcodec_send_packet returned " + retval);
    }

    AVFrame frame = Avutil55Library.INSTANCE.av_frame_alloc();
    retval = Avcodec57Library.INSTANCE.avcodec_receive_frame(codecContext, frame);
    if (retval < 0 && retval != AVERROR_EOF) {
        throw new FFmpegException("Decode packet failed! avcodec_receive_frame returned " + retval);
    }
    frame.read();

    updatePosition(frame);

    fixFrameArgs(frame);

    long destNumSamplesPerChannel =
            Avutil55Library.INSTANCE.av_rescale_rnd(
                    Swresample2Library.INSTANCE.swr_get_delay(swrContext, frame.sample_rate) + frame.nb_samples,
                    frame.sample_rate, frame.sample_rate, 0);
    PointerByReference tempPtr = new PointerByReference(audioBuffer);
    int numSamples = Swresample2Library.INSTANCE.swr_convert(swrContext, tempPtr,
            (int) destNumSamplesPerChannel, frame.data, frame.nb_samples);
    int dataSize = frame.channels * numSamples * Avutil55Library.INSTANCE.av_get_bytes_per_sample(sampleFormat);
    Avutil55Library.INSTANCE.av_frame_free(new PointerByReference(frame.getPointer()));

    audioBuffer = tempPtr.getValue();
    if (buf != null && buf.length == dataSize) {
        audioBuffer.read(0, buf, 0, dataSize);
        return buf;
    }
    return audioBuffer.getByteArray(0, dataSize);
}
Project: BIMplatform    File: IfcEngine.java
/**
 * Returns 'meta' information from a specific instance.
 * 
 * @param instance
 *            A numeric instanceID that uniquely identifies an instance.
 * @param className
 *            as used in the SPFF file.
 * @param classNameUC
 *            UPPER CASE version of the class name (as used in the schema).
 * @return the local ID of the instance
 */
public int engiGetInstanceMetaInfo(Pointer instance, String className, String classNameUC) {
    IntByReference localIdRef = new IntByReference();
    PointerByReference classNameRef = new PointerByReference();
    PointerByReference classNameUCRef = new PointerByReference();
    engine.engiGetInstanceMetaInfo(instance, localIdRef, classNameRef, classNameUCRef);
    className = classNameRef.getValue().getString(0);
    classNameUC = classNameUCRef.getValue().getString(0);
    return localIdRef.getValue();
}
Project: Sikulix2tesseract    File: TessAPI.java
/**
 * Deallocates the memory block occupied by text array.
 *
 * @param arr text array pointer reference
 */
void TessDeleteTextArray(PointerByReference arr);
Project: chromaprint-jna    File: ChromaprintLibrary.java
/**
 * Return the length of the current raw fingerprint.<br>
 * @param[in] ctx Chromaprint context pointer<br>
 * @param[out] size number of items in the current raw fingerprint<br>
 * @return 0 on error, 1 on success<br>
 * Original signature : <code>int chromaprint_get_raw_fingerprint_size(ChromaprintContext*, int*)</code><br>
 * <i>native declaration : Downloads/chromaprint-1.4.2/src/chromaprint.h:265</i>
 */
int chromaprint_get_raw_fingerprint_size(PointerByReference ctx, IntByReference size);
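Calling the declaration above follows the same pattern as the other chromaprint snippets in this listing: pass the context's PointerByReference and read the size back through an IntByReference. A sketch; context is the ChromaprintContext reference used elsewhere here (obtained from chromaprint_new(), which is not shown).

// context: the ChromaprintContext PointerByReference used in the other snippets.
IntByReference size = new IntByReference();
if (ChromaprintLibrary.INSTANCE.chromaprint_get_raw_fingerprint_size(context, size) == 1) {
    System.out.println("raw fingerprint items: " + size.getValue());
}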
Project: Sikulix2tesseract    File: TessAPI1.java
/**
 * Get the given level kind of components (block, textline, word etc.) as a
 * Leptonica-style <code>Boxa</code>, <code>Pixa</code> pair, in reading
 * order. Can be called before or after <code>Recognize</code>. If <code>blockids</code>
 * is not <code>NULL</code>, the block-id of each component is also returned
 * as an array of one element per component. delete [] after use. If
 * <code>text_only</code> is true, then only text components are returned.
 * Helper function to get binary images with no padding (most common usage).
 *
 * @param handle the TesseractAPI instance
 * @param level PageIteratorLevel
 * @param text_only if true (nonzero), only text components are returned
 * @param pixa array of Pix
 * @param blockids receives the block-id of each component; may be NULL
 * @return array of Box
 */
public static native Boxa TessBaseAPIGetComponentImages(TessBaseAPI handle, int level, int text_only, PointerByReference pixa, PointerByReference blockids);
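A sketch of calling the binding above: pixa and blockids are output parameters, each either a PointerByReference that the native side fills in or null if not needed. handle and level are assumed to exist in the caller's scope (handle created and initialized as in the Tesseract1.init() snippet, level a TessPageIteratorLevel constant).

// handle: an initialized TessBaseAPI; level: a TessPageIteratorLevel constant (both assumed).
PointerByReference pixa = new PointerByReference();
PointerByReference blockids = new PointerByReference();
Boxa boxa = TessAPI1.TessBaseAPIGetComponentImages(handle, level, 1 /* text_only */, pixa, blockids);
// boxa holds the component bounding boxes; pixa.getValue() and blockids.getValue()
// point to the Leptonica Pixa and the per-component block-id array ("delete [] after use").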
Project: Laplacian    File: Avcodec57Library.java
/**
 * Parse a packet.<br>
 * @param s             parser context.<br>
 * @param avctx         codec context.<br>
 * @param poutbuf       set to pointer to parsed buffer or NULL if not yet finished.<br>
 * @param poutbuf_size  set to size of parsed buffer or zero if not yet finished.<br>
 * @param buf           input buffer.<br>
 * @param buf_size      buffer size in bytes without the padding. I.e. the full buffer<br>
 * size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE.<br>
 * To signal EOF, this should be 0 (so that the last frame<br>
 * can be output).<br>
 * @param pts           input presentation timestamp.<br>
 * @param dts           input decoding timestamp.<br>
 * @param pos           input byte position in stream.<br>
 * @return the number of bytes of the input bitstream used.<br>
 * Example:<br>
 * @code<br>
 *   while(in_len){<br>
 *       len = av_parser_parse2(myparser, AVCodecContext, &data, &size,<br>
 *                                        in_data, in_len,<br>
 *                                        pts, dts, pos);<br>
 *       in_data += len;<br>
 *       in_len  -= len;<br>
 *       if(size)<br>
 *          decode_frame(data, size);<br>
 *   }<br>
 * @endcode<br>
 * Original signature : <code>int av_parser_parse2(AVCodecParserContext*, AVCodecContext*, uint8_t**, int*, const uint8_t*, int, int64_t, int64_t, int64_t)</code><br>
 * <i>native declaration : .\libavcodec\avcodec.h:1988</i><br>
 * @deprecated use the safer methods {@link #av_parser_parse2(org.ffmpeg.avcodec57.AVCodecParserContext, org.ffmpeg.avcodec57.AVCodecContext, PointerByReference, IntBuffer, byte[], int, long, long, long)} and {@link #av_parser_parse2(org.ffmpeg.avcodec57.AVCodecParserContext, org.ffmpeg.avcodec57.AVCodecContext, PointerByReference, IntByReference, Pointer, int, long, long, long)} instead
 */
@Deprecated 
int av_parser_parse2(AVCodecParserContext s, AVCodecContext avctx, PointerByReference poutbuf, IntByReference poutbuf_size, Pointer buf, int buf_size, long pts, long dts, long pos);
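The C example in the comment maps onto this binding roughly as follows. A sketch only: parser, codecContext, the input Pointer/length and the decode step are placeholders supplied by the caller; poutbuf and poutbufSize are the by-reference outputs that stand in for the C &data and &size.

// Placeholder-driven sketch of the C parsing loop from the javadoc above.
static void parseAll(AVCodecParserContext parser, AVCodecContext codecContext,
                     Pointer inData, int inLen, long pts, long dts, long pos) {
    PointerByReference poutbuf = new PointerByReference();
    IntByReference poutbufSize = new IntByReference();
    while (inLen > 0) {
        int len = Avcodec57Library.INSTANCE.av_parser_parse2(parser, codecContext,
                poutbuf, poutbufSize, inData, inLen, pts, dts, pos);
        inData = inData.share(len);   // advance the input buffer by the bytes consumed
        inLen -= len;
        if (poutbufSize.getValue() > 0) {
            Pointer packetData = poutbuf.getValue();   // one complete parsed packet
            int packetSize = poutbufSize.getValue();
            // hand packetData/packetSize to the decoder here (decode_frame in the C example)
        }
    }
}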
Project: Sikulix2tesseract    File: TessAPI.java
/**
 * Get the textlines as a Leptonica-style <code>Boxa</code>,
 * <code>Pixa</code> pair, in reading order. Can be called before or after
 * <code>Recognize</code>. If <code>blockids</code> is not <code>NULL</code>, the
 * block-id of each line is also returned as an array of one element per
 * line. delete [] after use. If <code>paraids</code> is not
 * <code>NULL</code>, the paragraph-id of each line within its block is also
 * returned as an array of one element per line. delete [] after use.<br>
 * Helper method to extract from the thresholded image (most common usage).
 *
 * @param handle the TesseractAPI instance
 * @param pixa array of Pix
 * @param blockids receives the block-id of each textline; may be NULL
 * @return array of Box
 */
Boxa TessBaseAPIGetTextlines(TessBaseAPI handle, PointerByReference pixa, PointerByReference blockids);
Project: Laplacian    File: Avcodec57Library.java
/**
 * Parse a packet.<br>
 * @param s             parser context.<br>
 * @param avctx         codec context.<br>
 * @param poutbuf       set to pointer to parsed buffer or NULL if not yet finished.<br>
 * @param poutbuf_size  set to size of parsed buffer or zero if not yet finished.<br>
 * @param buf           input buffer.<br>
 * @param buf_size      buffer size in bytes without the padding. I.e. the full buffer<br>
 * size is assumed to be buf_size + AV_INPUT_BUFFER_PADDING_SIZE.<br>
 * To signal EOF, this should be 0 (so that the last frame<br>
 * can be output).<br>
 * @param pts           input presentation timestamp.<br>
 * @param dts           input decoding timestamp.<br>
 * @param pos           input byte position in stream.<br>
 * @return the number of bytes of the input bitstream used.<br>
 * Example:<br>
 * @code<br>
 *   while(in_len){<br>
 *       len = av_parser_parse2(myparser, AVCodecContext, &data, &size,<br>
 *                                        in_data, in_len,<br>
 *                                        pts, dts, pos);<br>
 *       in_data += len;<br>
 *       in_len  -= len;<br>
 *       if(size)<br>
 *          decode_frame(data, size);<br>
 *   }<br>
 * @endcode<br>
 * Original signature : <code>int av_parser_parse2(AVCodecParserContext*, AVCodecContext*, uint8_t**, int*, const uint8_t*, int, int64_t, int64_t, int64_t)</code><br>
 * <i>native declaration : .\libavcodec\avcodec.h:1988</i>
 */
int av_parser_parse2(AVCodecParserContext s, AVCodecContext avctx, PointerByReference poutbuf, IntBuffer poutbuf_size, byte buf[], int buf_size, long pts, long dts, long pos);
Project: chromaprint-jna    File: ChromaprintLibrary.java
/**
 * Uncompress and optionally base64-decode an encoded fingerprint<br>
 * The caller is responsible for freeing the returned pointer using<br>
 * chromaprint_dealloc().<br>
 * @param[in] encoded_fp pointer to an encoded fingerprint<br>
 * @param[in] encoded_size size of the encoded fingerprint in bytes<br>
 * @param[out] fp pointer to a pointer, where the decoded raw fingerprint (array<br>
 *        of 32-bit integers) will be stored<br>
 * @param[out] size Number of items in the returned raw fingerprint<br>
 * @param[out] algorithm Chromaprint algorithm version which was used to generate the<br>
 *               raw fingerprint<br>
 * @param[in] base64 Whether the encoded_fp parameter contains binary data or<br>
 *            base64-encoded ASCII data. If 1, it will base64-decode the data<br>
 *            before uncompressing the fingerprint.<br>
 * @return 0 on error, 1 on success<br>
 * Original signature : <code>int chromaprint_decode_fingerprint(const char*, int, uint32_t**, int*, int*, int)</code><br>
 * <i>native declaration : Downloads/chromaprint-1.4.2/src/chromaprint.h:334</i>
 */
int chromaprint_decode_fingerprint(String encoded_fp, int encoded_size, PointerByReference fp, IntBuffer size, IntBuffer algorithm, int base64);
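A decoding sketch against the declaration above. The raw fingerprint comes back through the PointerByReference and is read with Pointer.getIntArray(); freeing it with chromaprint_dealloc(Pointer), as in the Fingerprint.getString() snippet, is an assumption about that binding's signature.

// Decode a base64-encoded fingerprint into the raw 32-bit-integer form.
static int[] decodeFingerprint(String encodedFp) {
    PointerByReference fp = new PointerByReference();
    IntBuffer size = IntBuffer.allocate(1);
    IntBuffer algorithm = IntBuffer.allocate(1);
    // base64 == 1: encodedFp is base64 ASCII, so its length equals its byte size
    int ok = ChromaprintLibrary.INSTANCE.chromaprint_decode_fingerprint(
            encodedFp, encodedFp.length(), fp, size, algorithm, 1);
    if (ok != 1) return new int[0];
    Pointer raw = fp.getValue();
    int[] result = raw.getIntArray(0, size.get(0));
    ChromaprintLibrary.INSTANCE.chromaprint_dealloc(raw); // free the library-allocated buffer
    return result;
}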
Project: chromaprint-jna    File: ChromaprintLibrary.java
/**
 * Compress and optionally base64-encode a raw fingerprint<br>
 * The caller is responsible for freeing the returned pointer using<br>
 * chromaprint_dealloc().<br>
 * @param[in] fp pointer to an array of 32-bit integers representing the raw<br>
 *        fingerprint to be encoded<br>
 * @param[in] size number of items in the raw fingerprint<br>
 * @param[in] algorithm Chromaprint algorithm version which was used to generate the<br>
 *               raw fingerprint<br>
 * @param[out] encoded_fp pointer to a pointer, where the encoded fingerprint will be<br>
 *                stored<br>
 * @param[out] encoded_size size of the encoded fingerprint in bytes<br>
 * @param[in] base64 Whether to return binary data or base64-encoded ASCII data. The<br>
 *            compressed fingerprint will be encoded using base64 with the<br>
 *            URL-safe scheme if you set this parameter to 1. It will return<br>
 *            binary data if it's 0.<br>
 * @return 0 on error, 1 on success<br>
 * Original signature : <code>int chromaprint_encode_fingerprint(const uint32_t*, int, int, char**, int*, int)</code><br>
 * <i>native declaration : Downloads/chromaprint-1.4.2/src/chromaprint.h:313</i>
 */
int chromaprint_encode_fingerprint(int fp[], int size, int algorithm, PointerByReference encoded_fp, IntBuffer encoded_size, int base64);
Project: Laplacian    File: Avcodec57Library.java
/**
 * Finalize list of bitstream filters.<br>
 * This function will transform @ref AVBSFList to single @ref AVBSFContext,<br>
 * so the whole chain of bitstream filters can be treated as single filter<br>
 * freshly allocated by av_bsf_alloc().<br>
 * If the call is successful, @ref AVBSFList structure is freed and lst<br>
 * will be set to NULL. In case of failure, caller is responsible for<br>
 * freeing the structure by av_bsf_list_free()<br>
 * @param      lst Filter list structure to be transformed<br>
 * @param[out] bsf Pointer to be set to newly created @ref AVBSFContext structure<br>
 *                 representing the chain of bitstream filters<br>
 * @return >=0 on success, negative AVERROR in case of failure<br>
 * Original signature : <code>int av_bsf_list_finalize(AVBSFList**, AVBSFContext**)</code><br>
 * <i>native declaration : .\libavcodec\avcodec.h:2601</i><br>
 * @deprecated use the safer method {@link #av_bsf_list_finalize(org.ffmpeg.avcodec57.AVBSFList.ByReference[], org.ffmpeg.avcodec57.AVBSFContext.ByReference[])} instead
 */
@Deprecated 
int av_bsf_list_finalize(PointerByReference lst, PointerByReference bsf);
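Per the comment above, on success the list is consumed (lst set to NULL) and bsf receives the new AVBSFContext*; on failure the caller still owns the list and frees it with av_bsf_list_free() (declared further down in this listing). A sketch, assuming lst already wraps an AVBSFList* built elsewhere.

// lst: a PointerByReference wrapping an AVBSFList* built elsewhere (assumed).
PointerByReference bsf = new PointerByReference();
int ret = Avcodec57Library.INSTANCE.av_bsf_list_finalize(lst, bsf);
if (ret >= 0) {
    Pointer bsfContext = bsf.getValue();             // AVBSFContext* for the whole filter chain; lst is now NULL
} else {
    Avcodec57Library.INSTANCE.av_bsf_list_free(lst); // on failure the caller still owns the list
}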
Project: chromaprint-jna    File: ChromaprintLibrary.java
/**
 * Send audio data to the fingerprint calculator.<br>
 * @param[in] ctx Chromaprint context pointer<br>
 * @param[in] data raw audio data, should point to an array of 16-bit signed<br>
 *          integers in native byte-order<br>
 * @param[in] size size of the data buffer (in samples)<br>
 * @return 0 on error, 1 on success<br>
 * Original signature : <code>int chromaprint_feed(ChromaprintContext*, const int16_t*, int)</code><br>
 * <i>native declaration : Downloads/chromaprint-1.4.2/src/chromaprint.h:217</i>
 */
int chromaprint_feed(PointerByReference ctx, ShortByReference data, int size);
Project: Laplacian    File: Swresample2Library.java
/**
 * Free the given SwrContext and set the pointer to NULL.<br>
 * @param[in] s a pointer to a pointer to Swr context<br>
 * Original signature : <code>void swr_free(SwrContext**)</code><br>
 * <i>native declaration : libswresample\swresample.h:99</i><br>
 * @deprecated use the safer method {@link #swr_free(org.ffmpeg.swresample2.SwrContext.ByReference[])} instead
 */
@Deprecated 
void swr_free(PointerByReference s);
Project: Laplacian    File: Postproc54Library.java
/**
 * Original signature : <code>void pp_postprocess(const uint8_t*[3], const int[3], uint8_t*[3], const int[3], int, int, const int8_t*, int, pp_mode*, pp_context*, int)</code><br>
 * <i>native declaration : libpostproc\postprocess.h:5</i><br>
 * @deprecated use the safer methods {@link #pp_postprocess(byte[], int[], ByteBuffer[], int[], int, int, byte[], int, org.ffmpeg.postproc54.Postproc54Library.pp_mode, org.ffmpeg.postproc54.Postproc54Library.pp_context, int)} and {@link #pp_postprocess(PointerByReference, IntByReference, PointerByReference, IntByReference, int, int, Pointer, int, org.ffmpeg.postproc54.Postproc54Library.pp_mode, org.ffmpeg.postproc54.Postproc54Library.pp_context, int)} instead
 */
@Deprecated 
void pp_postprocess(PointerByReference src, IntByReference srcStride, PointerByReference dst, IntByReference dstStride, int horizontalSize, int verticalSize, Pointer QP_store, int QP_stride, pp_mode mode, pp_context ppContext, int pict_type);
Project: Laplacian    File: Avdevice57Library.java
/**
 * Free resources created by avdevice_capabilities_create()<br>
 * @param caps Device capabilities data to be freed.<br>
 * @param s    Context of the device.<br>
 * Original signature : <code>void avdevice_capabilities_free(AVDeviceCapabilitiesQuery**, AVFormatContext*)</code><br>
 * <i>native declaration : libavdevice\avdevice.h:159</i><br>
 * @deprecated use the safer method {@link #avdevice_capabilities_free(org.ffmpeg.avdevice57.AVDeviceCapabilitiesQuery.ByReference[], org.ffmpeg.avformat57.AVFormatContext)} instead
 */
@Deprecated 
void avdevice_capabilities_free(PointerByReference caps, AVFormatContext s);
Project: Laplacian    File: Avdevice57Library.java
/**
 * List devices.<br>
 * Returns available device names and their parameters.<br>
 * @note: Some devices may accept system-dependent device names that cannot be<br>
 *        autodetected. The list returned by this function cannot be assumed to<br>
 *        be always completed.<br>
 * @param s                device context.<br>
 * @param[out] device_list list of autodetected devices.<br>
 * @return count of autodetected devices, negative on error.<br>
 * Original signature : <code>int avdevice_list_devices(AVFormatContext*, AVDeviceInfoList**)</code><br>
 * <i>native declaration : libavdevice\avdevice.h:182</i><br>
 * @deprecated use the safer method {@link #avdevice_list_devices(org.ffmpeg.avformat57.AVFormatContext, org.ffmpeg.avdevice57.AVDeviceInfoList.ByReference[])} instead
 */
@Deprecated 
int avdevice_list_devices(AVFormatContext s, PointerByReference device_list);
Project: chromaprint-jna    File: ChromaprintLibrary.java
/**
 * Get the number of channels that is internally used for fingerprinting.<br>
 * @note You normally don't need this. Just set the audio's actual number of channels<br>
 * when calling chromaprint_start() and everything will work. This is only used for<br>
 * certain optimized cases to control the audio source.<br>
 * @param[in] ctx Chromaprint context pointer<br>
 * @return number of channels<br>
 * Original signature : <code>int chromaprint_get_num_channels(ChromaprintContext*)</code><br>
 * <i>native declaration : Downloads/chromaprint-1.4.2/src/chromaprint.h:145</i>
 */
int chromaprint_get_num_channels(PointerByReference ctx);
Project: Laplacian    File: Avcodec57Library.java
/**
 * Free list of bitstream filters.<br>
 * @param lst Pointer to pointer returned by av_bsf_list_alloc()<br>
 * Original signature : <code>void av_bsf_list_free(AVBSFList**)</code><br>
 * <i>native declaration : .\libavcodec\avcodec.h:2568</i><br>
 * @deprecated use the safer method {@link #av_bsf_list_free(org.ffmpeg.avcodec57.AVBSFList.ByReference[])} instead
 */
@Deprecated 
void av_bsf_list_free(PointerByReference lst);
Project: Laplacian    File: Swscale4Library.java
/**
 * @return -1 if not supported<br>
 * Original signature : <code>int sws_getColorspaceDetails(SwsContext*, int**, int*, int**, int*, int*, int*, int*)</code><br>
 * <i>native declaration : libswscale\swscale.h:144</i><br>
 * @deprecated use the safer methods {@link #sws_getColorspaceDetails(org.ffmpeg.swscale4.Swscale4Library.SwsContext, PointerByReference, IntBuffer, PointerByReference, IntBuffer, IntBuffer, IntBuffer, IntBuffer)} and {@link #sws_getColorspaceDetails(org.ffmpeg.swscale4.Swscale4Library.SwsContext, PointerByReference, IntByReference, PointerByReference, IntByReference, IntByReference, IntByReference, IntByReference)} instead
 */
@Deprecated 
int sws_getColorspaceDetails(SwsContext c, PointerByReference inv_table, IntByReference srcRange, PointerByReference table, IntByReference dstRange, IntByReference brightness, IntByReference contrast, IntByReference saturation);
Project: Sikulix2tesseract    File: TessAPI.java
/**
 * Get the given level kind of components (block, textline, word etc.) as a
 * Leptonica-style <code>Boxa</code>, <code>Pixa</code> pair, in reading
 * order. Can be called before or after <code>Recognize</code>. If <code>blockids</code>
 * is not <code>NULL</code>, the block-id of each component is also returned
 * as an array of one element per component. delete [] after use. If
 * <code>text_only</code> is true, then only text components are returned.
 * Helper function to get binary images with no padding (most common usage).
 *
 * @param handle the TesseractAPI instance
 * @param level PageIteratorLevel
 * @param text_only if true (nonzero), only text components are returned
 * @param pixa array of Pix
 * @param blockids receives the block-id of each component; may be NULL
 * @return array of Box
 */
Boxa TessBaseAPIGetComponentImages(TessBaseAPI handle, int level, int text_only, PointerByReference pixa, PointerByReference blockids);
Project: Laplacian    File: Avformat57Library.java
/**
 * Open directory for reading.<br>
 * @param s       directory read context. Pointer to a NULL pointer must be passed.<br>
 * @param url     directory to be listed.<br>
 * @param options A dictionary filled with protocol-private options. On return<br>
 *                this parameter will be destroyed and replaced with a dictionary<br>
 *                containing options that were not found. May be NULL.<br>
 * @return >=0 on success or negative on error.<br>
 * Original signature : <code>int avio_open_dir(AVIODirContext**, const char*, AVDictionary**)</code><br>
 * <i>native declaration : libavformat\avio.h:174</i><br>
 * @deprecated use the safer methods {@link #avio_open_dir(org.ffmpeg.avformat57.AVIODirContext.ByReference[], String, org.ffmpeg.avutil55.AVDictionary.ByReference[])} and {@link #avio_open_dir(org.ffmpeg.avformat57.AVIODirContext.ByReference[], Pointer, org.ffmpeg.avutil55.AVDictionary.ByReference[])} instead
 */
@Deprecated 
int avio_open_dir(PointerByReference s, Pointer url, PointerByReference options);
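A sketch of the open/close pairing for this deprecated Pointer-based variant: the directory context comes back through the first PointerByReference and is later released with avio_close_dir() (declared below); a NULL options dictionary is passed. path is an assumed Java String.

// path: directory to list (assumed). Build a NUL-terminated native string for the url argument.
byte[] urlBytes = Native.toByteArray(path);
Memory url = new Memory(urlBytes.length);
url.write(0, urlBytes, 0, urlBytes.length);

PointerByReference dirCtx = new PointerByReference();   // AVIODirContext** out-parameter
int ret = Avformat57Library.INSTANCE.avio_open_dir(dirCtx, url, null);
if (ret >= 0) {
    // ... read entries with avio_read_dir() (not shown in this listing) ...
    Avformat57Library.INSTANCE.avio_close_dir(dirCtx);
}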
Project: Laplacian    File: Avformat57Library.java
/**
 * Close directory.<br>
 * @note Entries created using avio_read_dir() are not deleted and must be<br>
 * freeded with avio_free_directory_entry().<br>
 * @param s         directory read context.<br>
 * @return >=0 on success or negative on error.<br>
 * Original signature : <code>int avio_close_dir(AVIODirContext**)</code><br>
 * <i>native declaration : libavformat\avio.h:194</i><br>
 * @deprecated use the safer method {@link #avio_close_dir(org.ffmpeg.avformat57.AVIODirContext.ByReference[])} instead
 */
@Deprecated 
int avio_close_dir(PointerByReference s);
Project: Laplacian    File: Avformat57Library.java
/**
 * Free entry allocated by avio_read_dir().<br>
 * @param entry entry to be freed.<br>
 * Original signature : <code>void avio_free_directory_entry(AVIODirEntry**)</code><br>
 * <i>native declaration : libavformat\avio.h:200</i><br>
 * @deprecated use the safer method {@link #avio_free_directory_entry(org.ffmpeg.avformat57.AVIODirEntry.ByReference[])} instead
 */
@Deprecated 
void avio_free_directory_entry(PointerByReference entry);
Project: Sikulix2tesseract    File: TessAPI1.java
/**
 * Get textlines and strips of image regions as a Leptonica-style
 * <code>Boxa</code>, <code>Pixa</code> pair, in reading order. Enables
 * downstream handling of non-rectangular regions. Can be called before or
 * after <code>Recognize</code>. If <code>blockids</code> is not NULL, the block-id of
 * each line is also returned as an array of one element per line. delete []
 * after use.
 *
 * @param handle the TesseractAPI instance
 * @param pixa array of Pix
 * @param blockids receives the block-id of each line; may be NULL
 * @return array of Box
 */
public static native Boxa TessBaseAPIGetStrips(TessBaseAPI handle, PointerByReference pixa, PointerByReference blockids);
Project: Laplacian    File: Avformat57Library.java
/**
 * Close the resource accessed by the AVIOContext *s, free it<br>
 * and set the pointer pointing to it to NULL.<br>
 * This function can only be used if s was opened by avio_open().<br>
 * The internal buffer is automatically flushed before closing the<br>
 * resource.<br>
 * @return 0 on success, an AVERROR < 0 on error.<br>
 * @see avio_close<br>
 * Original signature : <code>int avio_closep(AVIOContext**)</code><br>
 * <i>native declaration : libavformat\avio.h:433</i><br>
 * @deprecated use the safer method {@link #avio_closep(org.ffmpeg.avformat57.AVIOContext.ByReference[])} instead
 */
@Deprecated 
int avio_closep(PointerByReference s);
Project: Sikulix2tesseract    File: TessAPI.java
/**
 * Gets the individual connected (text) components (created after pages
 * segmentation step, but before recognition) as a Leptonica-style
 * <code>Boxa</code>, <code>Pixa</code> pair, in reading order. Can be
 * called before or after <code>Recognize</code>.
 *
 * @param handle the TesseractAPI instance
 * @param cc array of Pix
 * @return array of Box
 */
Boxa TessBaseAPIGetConnectedComponents(TessBaseAPI handle, PointerByReference cc);