Commit c911b895 authored by 15김민규's avatar 15김민규

initial commit

parents
Pipeline #46 failed with stages
in 1 minute and 11 seconds
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding" addBOMForNewFiles="with NO BOM" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<compositeConfiguration>
<compositeBuild compositeDefinitionSource="SCRIPT" />
</compositeConfiguration>
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="resolveModulePerSourceSet" value="false" />
</GradleProjectSettings>
</option>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
<option name="id" value="Android" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RunConfigurationProducerService">
<option name="ignoredProducers">
<set>
<option value="org.jetbrains.plugins.gradle.execution.test.runner.AllInPackageGradleConfigurationProducer" />
<option value="org.jetbrains.plugins.gradle.execution.test.runner.TestClassGradleConfigurationProducer" />
<option value="org.jetbrains.plugins.gradle.execution.test.runner.TestMethodGradleConfigurationProducer" />
</set>
</option>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
apply plugin: 'com.android.application'
android {
compileSdkVersion 28
defaultConfig {
applicationId "com.kerbol.eeloo"
minSdkVersion 18
targetSdkVersion 28
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'com.android.support:appcompat-v7:28.0.0'
implementation 'com.android.support.constraint:constraint-layout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
}
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
package com.kerbol.eeloo;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies the instrumentation target resolves to this app's package. */
    @Test
    public void useAppContext() {
        final Context appContext = InstrumentationRegistry.getTargetContext();
        final String packageName = appContext.getPackageName();
        assertEquals("com.kerbol.eeloo", packageName);
    }
}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.kerbol.eeloo">
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme">
<activity android:name=".MainActivity">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
\ No newline at end of file
package com.kerbol.eeloo;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.Button;
import android.widget.TextView;
import android.view.View;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
findViewById(R.id.btnIncR).setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View v) {
radius_ += 0.01;
updateLabels();
}
});
findViewById(R.id.btnDecR).setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View v) {
radius_ -= 0.01;
updateLabels();
}
});
findViewById(R.id.btnIncTheta).setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View v) {
theta_ += 0.01;
updateLabels();
}
});
findViewById(R.id.btnDecTheta).setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View v) {
theta_ -= 0.01;
updateLabels();
}
});
findViewById(R.id.btnIncRho).setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View v) {
rho_ += 0.01;
updateLabels();
}
});
findViewById(R.id.btnDecRho).setOnClickListener(new Button.OnClickListener() {
@Override
public void onClick(View v) {
rho_ -= 0.01;
updateLabels();
}
});
}
private void updateLabels()
{
((TextView)findViewById(R.id.txtR)).setText("r : " + radius_);
((TextView)findViewById(R.id.txtTheta)).setText("theta : " + theta_);
((TextView)findViewById(R.id.txtRho)).setText("rho : " + rho_);
}
private double radius_ = 1;
private double theta_ = 1;
private double rho_ = 1;
}
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// !!!! THIS FILE IS GENERATED AUTOMATICALLY, DO NOT CHANGE IT !!!!
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
/*
* ARDroneGeneratedCommandIn.h
* ARDroneEngine
*
* Automatically generated.
* Copyright 2011 Parrot SA. All rights reserved
*
*/
#ifdef COMMAND_IN_CONFIG_KEY
#ifdef COMMAND_IN_CONFIG_KEY_STRING
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_ENABLE, video_enable, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_CONTROL_LEVEL, control_level, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_EULER_ANGLE_MAX, euler_angle_max, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_ALTITUDE_MAX, altitude_max, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_ALTITUDE_MIN, altitude_min, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_CONTROL_IPHONE_TILT, control_iphone_tilt, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_CONTROL_VZ_MAX, control_vz_max, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_CONTROL_YAW, control_yaw, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_OUTDOOR, outdoor, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_FLIGHT_WITHOUT_SHELL, flight_without_shell, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_AUTONOMOUS_FLIGHT, autonomous_flight, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_MANUAL_TRIM, manual_trim, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_INDOOR_EULER_ANGLE_MAX, indoor_euler_angle_max, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_INDOOR_CONTROL_VZ_MAX, indoor_control_vz_max, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_INDOOR_CONTROL_YAW, indoor_control_yaw, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_OUTDOOR_EULER_ANGLE_MAX, outdoor_euler_angle_max, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_OUTDOOR_CONTROL_VZ_MAX, outdoor_control_vz_max, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_OUTDOOR_CONTROL_YAW, outdoor_control_yaw, float )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_FLYING_MODE, flying_mode, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_HOVERING_RANGE, hovering_range, int )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_FLIGHT_ANIM, flight_anim )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_TRAVELLING_MODE, travelling_mode )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_TRAVELLING_ENABLE, travelling_enable, int )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_SSID_SINGLE_PLAYER, ssid_single_player )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_SSID_MULTI_PLAYER, ssid_multi_player )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_WIFI_MODE, wifi_mode, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_WIFI_RATE, wifi_rate, int )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_OWNER_MAC, owner_mac )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_CODEC_FPS, codec_fps, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_CODEC, video_codec, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_SLICES, video_slices, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_LIVE_SOCKET, video_live_socket, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_BITRATE, bitrate, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_MAX_BITRATE, max_bitrate, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_BITRATE_CTRL_MODE, bitrate_ctrl_mode, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_BITRATE_STORAGE, bitrate_storage, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_CHANNEL, video_channel, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_ON_USB, video_on_usb, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_VIDEO_FILE_INDEX, video_file_index, int )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_LEDS_ANIM, leds_anim )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_ENEMY_COLORS, enemy_colors, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_GROUNDSTRIPE_COLORS, groundstripe_colors, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_ENEMY_WITHOUT_SHELL, enemy_without_shell, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_DETECT_TYPE, detect_type, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_DETECTIONS_SELECT_H, detections_select_h, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_DETECTIONS_SELECT_V_HSYNC, detections_select_v_hsync, int )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_DETECTIONS_SELECT_V, detections_select_v, int )
COMMAND_IN_CONFIG_KEY_STRING (ARDRONE_CONFIG_KEY_USERBOX_CMD, userbox_cmd )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_LATITUDE, latitude, double )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_LONGITUDE, longitude, double )
COMMAND_IN_CONFIG_KEY (ARDRONE_CONFIG_KEY_ALTITUDE, altitude, double )
#endif //COMMAND_IN_CONFIG_KEY_STRING
#endif //COMMAND_IN_CONFIG_KEY
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
// !!!! THIS FILE IS GENERATED AUTOMATICALLY, DO NOT CHANGE IT !!!!
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
/*
* ARDroneGeneratedTypes.h
* ARDroneEngine
*
* Automatically generated.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#ifndef _ARDRONE_GENERATED_TYPES_H_
#define _ARDRONE_GENERATED_TYPES_H_
#define ARDRONE_MAX_ENEMIES 4
typedef enum {
ARDRONE_LED_ANIMATION_BLINK_GREEN_RED,
ARDRONE_LED_ANIMATION_BLINK_GREEN,
ARDRONE_LED_ANIMATION_BLINK_RED,
ARDRONE_LED_ANIMATION_BLINK_ORANGE,
ARDRONE_LED_ANIMATION_SNAKE_GREEN_RED,
ARDRONE_LED_ANIMATION_FIRE,
ARDRONE_LED_ANIMATION_STANDARD,
ARDRONE_LED_ANIMATION_RED,
ARDRONE_LED_ANIMATION_GREEN,
ARDRONE_LED_ANIMATION_RED_SNAKE,
ARDRONE_LED_ANIMATION_BLANK,
ARDRONE_LED_ANIMATION_RIGHT_MISSILE,
ARDRONE_LED_ANIMATION_LEFT_MISSILE,
ARDRONE_LED_ANIMATION_DOUBLE_MISSILE,
ARDRONE_LED_ANIMATION_FRONT_LEFT_GREEN_OTHERS_RED,
ARDRONE_LED_ANIMATION_FRONT_RIGHT_GREEN_OTHERS_RED,
ARDRONE_LED_ANIMATION_REAR_RIGHT_GREEN_OTHERS_RED,
ARDRONE_LED_ANIMATION_REAR_LEFT_GREEN_OTHERS_RED,
ARDRONE_LED_ANIMATION_LEFT_GREEN_RIGHT_RED,
ARDRONE_LED_ANIMATION_LEFT_RED_RIGHT_GREEN,
ARDRONE_LED_ANIMATION_BLINK_STANDARD,
} ARDRONE_LED_ANIMATION;
typedef enum {
ARDRONE_ANIMATION_PHI_M30_DEG=0,
ARDRONE_ANIMATION_PHI_30_DEG,
ARDRONE_ANIMATION_THETA_M30_DEG,
ARDRONE_ANIMATION_THETA_30_DEG,
ARDRONE_ANIMATION_THETA_20DEG_YAW_200DEG,
ARDRONE_ANIMATION_THETA_20DEG_YAW_M200DEG,
ARDRONE_ANIMATION_TURNAROUND,
ARDRONE_ANIMATION_TURNAROUND_GODOWN,
ARDRONE_ANIMATION_YAW_SHAKE,
ARDRONE_ANIMATION_YAW_DANCE,
ARDRONE_ANIMATION_PHI_DANCE,
ARDRONE_ANIMATION_THETA_DANCE,
ARDRONE_ANIMATION_VZ_DANCE,
ARDRONE_ANIMATION_WAVE,
ARDRONE_ANIMATION_PHI_THETA_MIXED,
ARDRONE_ANIMATION_DOUBLE_PHI_THETA_MIXED,
ARDRONE_ANIMATION_FLIP_AHEAD,
ARDRONE_ANIMATION_FLIP_BEHIND,
ARDRONE_ANIMATION_FLIP_LEFT,
ARDRONE_ANIMATION_FLIP_RIGHT,
} ARDRONE_ANIMATION;
typedef enum {
ARDRONE_CAMERA_DETECTION_HORIZONTAL=0,
ARDRONE_CAMERA_DETECTION_VERTICAL,
ARDRONE_CAMERA_DETECTION_VISION,
ARDRONE_CAMERA_DETECTION_NONE,
ARDRONE_CAMERA_DETECTION_COCARDE,
ARDRONE_CAMERA_DETECTION_ORIENTED_COCARDE,
ARDRONE_CAMERA_DETECTION_STRIPE,
ARDRONE_CAMERA_DETECTION_H_COCARDE,
ARDRONE_CAMERA_DETECTION_H_ORIENTED_COCARDE,
ARDRONE_CAMERA_DETECTION_STRIPE_V,
ARDRONE_CAMERA_DETECTION_MULTIPLE_DETECTION_MODE,
ARDRONE_CAMERA_DETECTION_CAP,
ARDRONE_CAMERA_DETECTION_ORIENTED_COCARDE_BW,
ARDRONE_CAMERA_DETECTION_VISION_V2,
ARDRONE_CAMERA_DETECTION_TOWER_SIDE,
ARDRONE_CAMERA_DETECTION_NUM,
} ARDRONE_CAMERA_DETECTION_TYPE;
typedef enum {
ARDRONE_VIDEO_CHANNEL_FIRST=0,
ARDRONE_VIDEO_CHANNEL_HORI=ARDRONE_VIDEO_CHANNEL_FIRST,
ARDRONE_VIDEO_CHANNEL_VERT,
ARDRONE_VIDEO_CHANNEL_LARGE_HORI_SMALL_VERT,
ARDRONE_VIDEO_CHANNEL_LARGE_VERT_SMALL_HORI,
ARDRONE_VIDEO_CHANNEL_LAST=ARDRONE_VIDEO_CHANNEL_LARGE_VERT_SMALL_HORI,
ARDRONE_VIDEO_CHANNEL_NEXT,
} ARDRONE_VIDEO_CHANNEL;
typedef enum {
ARDRONE_VARIABLE_BITRATE_MODE_DISABLED=0,
ARDRONE_VARIABLE_BITRATE_MODE_DYNAMIC,
ARDRONE_VARIABLE_BITRATE_MANUAL
} ARDRONE_VARIABLE_BITRATE;
typedef enum {
ARDRONE_ENEMY_COLOR_ORANGE_GREEN=1,
ARDRONE_ENEMY_COLOR_ORANGE_YELLOW,
ARDRONE_ENEMY_COLOR_ORANGE_BLUE,
ARDRONE_ENEMY_COLOR_ARRACE_FINISH_LINE=0x10,
ARDRONE_ENEMY_COLOR_ARRACE_DONUT=0x11
} ARDRONE_ENEMY_COLOR;
typedef enum {
ARDRONE_CONFIG_KEY_VIDEO_ENABLE,//BOOLEAN
ARDRONE_CONFIG_KEY_CONTROL_LEVEL,//INT
ARDRONE_CONFIG_KEY_EULER_ANGLE_MAX,//FLOAT
ARDRONE_CONFIG_KEY_ALTITUDE_MAX,//INT
ARDRONE_CONFIG_KEY_ALTITUDE_MIN,//INT
ARDRONE_CONFIG_KEY_CONTROL_IPHONE_TILT,//FLOAT
ARDRONE_CONFIG_KEY_CONTROL_VZ_MAX,//FLOAT
ARDRONE_CONFIG_KEY_CONTROL_YAW,//FLOAT
ARDRONE_CONFIG_KEY_OUTDOOR,//BOOLEAN
ARDRONE_CONFIG_KEY_FLIGHT_WITHOUT_SHELL,//BOOLEAN
ARDRONE_CONFIG_KEY_AUTONOMOUS_FLIGHT,//BOOLEAN
ARDRONE_CONFIG_KEY_MANUAL_TRIM,//BOOLEAN
ARDRONE_CONFIG_KEY_INDOOR_EULER_ANGLE_MAX,//FLOAT
ARDRONE_CONFIG_KEY_INDOOR_CONTROL_VZ_MAX,//FLOAT
ARDRONE_CONFIG_KEY_INDOOR_CONTROL_YAW,//FLOAT
ARDRONE_CONFIG_KEY_OUTDOOR_EULER_ANGLE_MAX,//FLOAT
ARDRONE_CONFIG_KEY_OUTDOOR_CONTROL_VZ_MAX,//FLOAT
ARDRONE_CONFIG_KEY_OUTDOOR_CONTROL_YAW,//FLOAT
ARDRONE_CONFIG_KEY_FLYING_MODE,//INT
ARDRONE_CONFIG_KEY_HOVERING_RANGE,//INT
ARDRONE_CONFIG_KEY_FLIGHT_ANIM,//STRING
ARDRONE_CONFIG_KEY_TRAVELLING_MODE,//STRING
ARDRONE_CONFIG_KEY_TRAVELLING_ENABLE,//BOOLEAN
ARDRONE_CONFIG_KEY_SSID_SINGLE_PLAYER,//STRING
ARDRONE_CONFIG_KEY_SSID_MULTI_PLAYER,//STRING
ARDRONE_CONFIG_KEY_WIFI_MODE,//INT
ARDRONE_CONFIG_KEY_WIFI_RATE,//INT
ARDRONE_CONFIG_KEY_OWNER_MAC,//STRING
ARDRONE_CONFIG_KEY_CODEC_FPS,//INT
ARDRONE_CONFIG_KEY_VIDEO_CODEC,//INT
ARDRONE_CONFIG_KEY_VIDEO_SLICES,//INT
ARDRONE_CONFIG_KEY_VIDEO_LIVE_SOCKET,//INT
ARDRONE_CONFIG_KEY_BITRATE,//INT
ARDRONE_CONFIG_KEY_MAX_BITRATE,//INT
ARDRONE_CONFIG_KEY_BITRATE_CTRL_MODE,//INT
ARDRONE_CONFIG_KEY_BITRATE_STORAGE,//INT
ARDRONE_CONFIG_KEY_VIDEO_CHANNEL,//INT
ARDRONE_CONFIG_KEY_VIDEO_ON_USB,//BOOLEAN
ARDRONE_CONFIG_KEY_VIDEO_FILE_INDEX,//INT
ARDRONE_CONFIG_KEY_LEDS_ANIM,//STRING
ARDRONE_CONFIG_KEY_ENEMY_COLORS,//INT
ARDRONE_CONFIG_KEY_GROUNDSTRIPE_COLORS,//INT
ARDRONE_CONFIG_KEY_ENEMY_WITHOUT_SHELL,//INT
ARDRONE_CONFIG_KEY_DETECT_TYPE,//INT
ARDRONE_CONFIG_KEY_DETECTIONS_SELECT_H,//INT
ARDRONE_CONFIG_KEY_DETECTIONS_SELECT_V_HSYNC,//INT
ARDRONE_CONFIG_KEY_DETECTIONS_SELECT_V,//INT
ARDRONE_CONFIG_KEY_USERBOX_CMD,//STRING
ARDRONE_CONFIG_KEY_LATITUDE,//DOUBLE
ARDRONE_CONFIG_KEY_LONGITUDE,//DOUBLE
ARDRONE_CONFIG_KEY_ALTITUDE,//DOUBLE
} ARDRONE_CONFIG_KEYS;
typedef enum {
ARDRONE_VIDEO_CODEC_UVLC = 0x20,
ARDRONE_VIDEO_CODEC_P264 = 0x40,
MP4_ARDRONE_VIDEO_CODEC_360P = 0x80,
H264_ARDRONE_VIDEO_CODEC_360P = 0x81,
MP4_360P_H264_ARDRONE_VIDEO_CODEC_720P = 0x82,
H264_ARDRONE_VIDEO_CODEC_720P = 0x83,
MP4_360P_ARDRONE_VIDEO_CODEC_SLRS = 0x84,
H264_360P_ARDRONE_VIDEO_CODEC_SLRS = 0x85,
H264_720P_ARDRONE_VIDEO_CODEC_SLRS = 0x86,
H264_AUTO_ARDRONE_VIDEO_CODEC_RESIZE = 0x87,
} ARDRONE_VIDEO_CODEC;
typedef enum {
ARDRONE_FLYING_MODE_FREE_FLIGHT=0,
ARDRONE_FLYING_MODE_HOVER_ON_TOP_OF_ROUNDEL=1<<0,
ARDRONE_FLYING_MODE_HOVER_ON_TOP_OF_ORIENTED_ROUNDEL=1<<1,
} ARDRONE_FLYING_MODE;
typedef enum {
ARDRONE_ACADEMY_STATE_NONE,
ARDRONE_ACADEMY_STATE_CONNECTION,
ARDRONE_ACADEMY_STATE_PREPARE_PROCESS,
ARDRONE_ACADEMY_STATE_PROCESS,
ARDRONE_ACADEMY_STATE_FINISH_PROCESS,
ARDRONE_ACADEMY_STATE_DISCONNECTION,
ARDRONE_ACADEMY_STATE_MAX,
} ARDRONE_ACADEMY_STATE;
typedef enum {
ARDRONE_ACADEMY_RESULT_NONE,
ARDRONE_ACADEMY_RESULT_OK,
ARDRONE_ACADEMY_RESULT_FAILED,
} ARDRONE_ACADEMY_RESULT;
typedef enum {
ARDRONE_FLYING_STATE_LANDED=0,
ARDRONE_FLYING_STATE_FLYING,
ARDRONE_FLYING_STATE_TAKING_OFF,
ARDRONE_FLYING_STATE_LANDING,
} ARDRONE_FLYING_STATE;
#endif // _ARDRONE_GENERATED_TYPES_H_
/*
* ARDroneTypes.h
* ARDroneEngine
*
* Created by Frédéric D'HAEYER on 21/05/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#ifndef _ARDRONE_TYPES_H_
#define _ARDRONE_TYPES_H_
#include "ARDroneGeneratedTypes.h"
#define ARDRONE_ADDRESS_SIZE 128
#define ARDRONE_VERSION_SIZE 128
#define HELVETICA @"HelveticaNeue-CondensedBold"
#define WHITE(a) [UIColor colorWithWhite:1.f alpha:(a)]
#define BLACK(a) [UIColor colorWithWhite:0.f alpha:(a)]
#define ORANGE(a) [UIColor colorWithRed:255.f/255.f green:120.f/255.f blue:0.f/255.f alpha:(a)]
// Attribute index.
enum {
ARDRONE_ATTRIB_POSITION,
ARDRONE_ATTRIB_TEXCOORD,
ARDRONE_NUM_ATTRIBUTES
};
/**
* Define the command identifiers from drone to Game Engine
*/
typedef enum {
ARDRONE_COMMAND_RUN,
ARDRONE_COMMAND_PAUSE,
ARDRONE_COMMAND_FIRE,
} ARDRONE_COMMAND_OUT;
/**
* Define the command identifiers from Game Engine to drone
*/
typedef enum {
ARDRONE_COMMAND_ISCLIENT, // Command to set if the multiplayer is client
ARDRONE_COMMAND_DRONE_ANIM, // Command to set a drone animation
ARDRONE_COMMAND_DRONE_LED_ANIM, // Command to set a drone led animation
ARDRONE_COMMAND_SET_CONFIG, // Command to set a drone configuration key
ARDRONE_COMMAND_ENABLE_COMBINED_YAW,// Command to enable / disable combined yaw command.
ARDRONE_COMMAND_VIDEO_CHANNEL, // Command to set the channel of video -- DEPRECATED
ARDRONE_COMMAND_CAMERA_DETECTION, // Command to set camera type for detection. -- DEPRECATED
ARDRONE_COMMAND_ENEMY_SET_PARAM, // Command to set enemy parameter for detection (color and hull). -- DEPRECATED
ARDRONE_COMMAND_SET_FLY_MODE, // Command to change flying mode -- DEPRECATED
} ARDRONE_COMMAND_IN;
typedef void (*command_in_configuration_callback)(int result);
/* Define ARDrone information structure */
typedef struct
{
char drone_address[ARDRONE_ADDRESS_SIZE];
char drone_version[ARDRONE_VERSION_SIZE];
} ardrone_info_t;
typedef struct
{
ARDRONE_COMMAND_IN command;
command_in_configuration_callback callback;
void *parameter;
} ARDRONE_COMMAND_IN_WITH_PARAM;
typedef struct
{
ARDRONE_ENEMY_COLOR color;
int outdoor_shell; // 1 if enemy has outdoor shell, else 0
} ARDRONE_ENEMY_PARAM;
typedef struct
{
ARDRONE_LED_ANIMATION led_anim;
float frequency;
unsigned int duration;
} ARDRONE_LED_ANIMATION_PARAM;
typedef struct
{
ARDRONE_ANIMATION drone_anim;
int timeout;
} ARDRONE_ANIMATION_PARAM;
/* Comments are used for autogeneration
* Do not modify these !
*/
// MATCH_TYPES : int : int32_t bool_t
// MATCH_TYPES : unsigned int : uint32_t
// MATCH_TYPES : float : float32_t
// MATCH_TYPES : double : float64_t
/* End of autogeneration comments */
typedef struct
{
ARDRONE_CONFIG_KEYS config_key;
void *pvalue;
} ARDRONE_CONFIG_PARAM;
/**
* Define what a 3D vector is
*/
typedef struct
{
float x;
float y;
float z;
}
ARDroneVector3D;
/**
* Define a structure to collect drone's navigation data
*/
typedef struct
{
/**
* Translation speed of the drone, in meters per second
*/
ARDroneVector3D linearVelocity;
/**
* Rotation speed of the drone, in degré
*/
ARDroneVector3D angularPosition;
/**
* Navdata video num frames to synchronized Navdata with video
*/
int navVideoNumFrames;
/**
* Video num frames to synchronized Navdata with video
*/
int videoNumFrames;
/**
* Value indicates drone flying state (see ARDRONE_FLYING_STATE enum)
*/
ARDRONE_FLYING_STATE flyingState;
/**
* int indicates drone is in emergency state (1 if is in emergency, 0 else)
*/
int emergencyState;
/**
* Camera detection type
*/
ARDRONE_CAMERA_DETECTION_TYPE detection_type;
/**
* Number of finish lines detected
*/
unsigned int finishLineCount;
/**
* Number of double taps detected
*/
unsigned int doubleTapCount;
/**
* Tells the application that the ardrone engine is done with initial configuration so the application can send their own configs
* (1 if application can send config, 0 otherwise)
*/
int isInit;
}
ARDroneNavigationData;
/**
* Define a structure to exchange an enemy data
*/
typedef struct
{
/**
* Position of the enemy (between -1.0 and 1.0)
*/
ARDroneVector3D position;
/**
* Size of the enemy (between 0.0 and 2.0)
*/
float height, width;
/**
* Angle of the enemy (between -90.0° and 90.0°)
*/
float orientation_angle;
}
ARDroneEnemyData;
/**
* Define a structure to exchange camera parameters compute by detection
*/
typedef struct
{
/**
* Rotation matrix of camera
*/
float rotation[3][3];
/**
* Translation matrix of camera
*/
float translation[3];
/**
* Index of tag detected
*/
int tag_index;
}
ARDroneDetectionCamera;
/**
* Define a structure to exchange camera parameters compute by drone
*/
typedef struct
{
/**
* Rotation matrix of camera
*/
float rotation[3][3];
/**
* Translation matrix of camera
*/
float translation[3];
}
ARDroneCamera;
/**
* Define a structure to exchange all enemies data
*/
typedef struct
{
/**
* Number of enemies
*/
unsigned int count;
/**
* Pointer to an array that contains the data structure of each enemy
*/
ARDroneEnemyData data[ARDRONE_MAX_ENEMIES];
}
ARDroneEnemiesData;
#endif // _ARDRONE_TYPES_H_
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := sdk
LOCAL_SRC_FILES := libsdk.a
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := pc_ardrone
LOCAL_SRC_FILES := libpc_ardrone.a
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_MODULE := vlib
LOCAL_SRC_FILES := libvlib.a
include $(PREBUILT_STATIC_LIBRARY)
include $(CLEAR_VARS)
LOCAL_CFLAGS += -I$(SDK_PATH)
LOCAL_CFLAGS += -I$(SDK_PATH)/Soft/Common
LOCAL_CFLAGS += -I$(SDK_PATH)/Soft/Lib
LOCAL_CFLAGS += -I$(SDK_PATH)/VLIB
LOCAL_CFLAGS += -I$(SDK_PATH)/VLIB/Platform/arm9
LOCAL_CFLAGS += -I$(SDK_PATH)/VP_SDK
LOCAL_CFLAGS += -I$(SDK_PATH)/VP_SDK/VP_Com/linux
LOCAL_CFLAGS += -I$(SDK_PATH)/VP_SDK/VP_Com
LOCAL_CFLAGS += -I$(SDK_PATH)/VP_SDK/VP_Os
LOCAL_CFLAGS += -I$(SDK_PATH)/VP_SDK/VP_Os/linux
LOCAL_CFLAGS += -I$(SDK_PATH)/VP_SDK/VP_Com/linux
LOCAL_C_INCLUDES:= $(LOCAL_PATH)/../ITTIAM/avc_decoder/includes \
$(LOCAL_PATH)/../ITTIAM/m4v_decoder/includes \
$(LOCAL_PATH)/../FFMPEG/Includes
#LIB_PATH=$(LOCAL_PATH)/../../libs/armeabi
LOCAL_LDLIBS := -llog -lGLESv2 -ljnigraphics
LOCAL_MODULE := adfreeflight
LOCAL_SRC_FILES := app.c \
video_stage_io_file.c \
hardware_capabilites.c \
Controller/ardrone_controller.c \
ControlData.c \
Callbacks/drone_proxy_callbacks.c \
Callbacks/java_callbacks.c \
Plf/plf.c \
Stubs/drone_stub.c \
Stubs/drone_config_stub.c \
Stubs/ftp_client_stub.c \
Stubs/plf_file_stub.c \
Stubs/transcoding_service_stub.c \
Stubs/gl_bg_video_sprite_stub.c \
NavData/nav_data.c \
Video/video_stage_renderer.c \
Video/frame_rate.c \
Controller/virtual_gamepad.c \
Video/opengl_stage.c \
Video/opengl_shader.c
LOCAL_STATIC_LIBRARIES := pc_ardrone vlib sdk ittiam_avc_decoder ittiam_m4v_decoder ittiam_decoder_utils
LOCAL_SHARED_LIBRARIES := AVUTIL-prebuilt AVCODEC-prebuilt SWSCALE-prebuilt AVFILTER-prebuilt AVFORMAT-prebuilt AVDEVICE-prebuilt
LOCAL_CFLAGS += -D__USE_GNU -D__linux__ -DNO_ARDRONE_MAINLOOP -DUSE_ANDROID -DTARGET_CPU_ARM=1 -DTARGET_CPU_X86=0 -DUSE_WIFI -DFFMPEG_SUPPORT -fstack-protector
LOCAL_CFLAGS += -DANDROID_NDK
#LOCAL_LDFLAGS := -Wl,-Map,app.map
include $(BUILD_SHARED_LIBRARY)
/*
* drone_proxy_callback.c
*
* Created on: May 10, 2011
* Author: Dmytro Baryskyy
*/
#include "../common.h"
#include "java_callbacks.h"
#include "drone_proxy_callbacks.h"
static const char* TAG = "DRONE_PROXY_CALLBACK";
/* Notify the Java layer that the drone connection has been established by
 * invoking the no-argument callback method "onConnected" on obj. */
void parrot_drone_proxy_onConnected(JNIEnv* env, jobject obj)
{
    parrot_java_callbacks_call_void_method(env, obj, "onConnected");
}
/* Notify the Java layer that the drone connection has been lost by invoking
 * the no-argument callback method "onDisconnected" on obj. */
void parrot_drone_proxy_onDisconnected(JNIEnv* env, jobject obj)
{
    parrot_java_callbacks_call_void_method(env, obj, "onDisconnected");
}
/* Forward a connection-failure code to the Java callback
 * "onConnectionFailed(int)" on obj. Safe to call with a NULL env/obj. */
void parrot_drone_proxy_onConnectionFailed(JNIEnv* env, jobject obj, int code)
{
    /* Consistency fix: also guard against a NULL obj, like the generic
     * helpers in java_callbacks.c do. */
    if (env == NULL || obj == NULL) {
        return;
    }
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, "onConnectionFailed", "(I)V");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        /* Fix: release the local class reference on the early-return path
         * too; previously it leaked until the callback returned to Java. */
        (*env)->DeleteLocalRef(env, cls);
        return;
    }
    jint jcode = code;
    (*env)->CallVoidMethod(env, obj, mid, jcode);
    (*env)->DeleteLocalRef(env, cls);
}
/* Notify the Java layer that the drone configuration has changed by invoking
 * the no-argument callback method "onConfigChanged" on obj. */
void parrot_drone_proxy_onConfigChanged(JNIEnv* env, jobject obj)
{
    parrot_java_callbacks_call_void_method(env, obj, "onConfigChanged");
}
/*
* drone_proxy_callbacks.h
*
* Created on: May 7, 2011
* Author: Dmytro Baryskyy
*/
#ifndef DRONE_PROXY_CALLBACKS_H_
#define DRONE_PROXY_CALLBACKS_H_
// These callbacks are called by the native code and the control will go to the Java
extern void parrot_drone_proxy_onConnected (JNIEnv* env, jobject /*obj*/);
extern void parrot_drone_proxy_onConnectionFailed(JNIEnv* env, jobject /*obj*/, int /*code*/);
extern void parrot_drone_proxy_onDisconnected (JNIEnv* env, jobject /*obj*/);
extern void parrot_drone_proxy_onConfigChanged (JNIEnv* env, jobject /*obj*/);
extern void ardrone_academy_callback_called (const char* mediaPath, bool_t addToQueue);
#endif /* DRONE_PROXY_CALLBACKS_H_ */
/*
* java_callbacks.c
*
* Created on: Jan 31, 2012
* Author: "Dmytro Baryskyy"
*/
#include "common.h"
#include "java_callbacks.h"
static const char* TAG = "JAVA_CALLBACKS";
/* Invoke the no-argument void Java method methodName on obj.
 * Logs and returns without action if env/obj is NULL or the method is not
 * found on obj's class. */
void parrot_java_callbacks_call_void_method(JNIEnv *env, jobject obj, const char* methodName)
{
    if (env == NULL || obj == NULL) {
        LOGW(TAG, "env or obj is null");
        return;
    }
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, methodName, "()V");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        /* Fix: release the local class reference on the early-return path
         * too; it previously leaked on every failed lookup. */
        (*env)->DeleteLocalRef(env, cls);
        return;
    }
    (*env)->CallVoidMethod(env, obj, mid);
    (*env)->DeleteLocalRef(env, cls);
}
/* Invoke the Java method methodName with signature (II)V on obj, passing
 * param1 and param2. Unlike the other helpers, this one fetches the JNIEnv
 * for the current thread from the cached JavaVM g_vm, so it can be called
 * from native threads that did not receive an env.
 * NOTE(review): the thread must already be attached to the VM — GetEnv does
 * not attach; if it fails, env stays NULL and we bail out below. */
void parrot_java_callbacks_call_void_method_int_int(jobject obj, const char* methodName, int param1, int param2)
{
    JNIEnv* env = NULL;
    if (g_vm != NULL)
    {
        (*g_vm)->GetEnv(g_vm, (void **)&env, JNI_VERSION_1_6);
    }
    if (env == NULL || obj == NULL) {
        LOGW(TAG, "env or obj is null");
        return;
    }
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, methodName, "(II)V");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        /* Fix: release the local class reference on the early-return path. */
        (*env)->DeleteLocalRef(env, cls);
        return;
    }
    (*env)->CallVoidMethod(env, obj, mid, param1, param2);
    (*env)->DeleteLocalRef(env, cls);
}
/* Invoke the Java method methodName with signature (Ljava/lang/String;)V on
 * obj, converting param to a Java string first. */
void parrot_java_callbacks_call_void_method_string(JNIEnv *env, jobject obj, const char*methodName, const char* param)
{
    if (env == NULL || obj == NULL) {
        LOGW(TAG, "env or obj is null");
        return;
    }
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, methodName, "(Ljava/lang/String;)V");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        /* Fix: release the local class reference on the early-return path. */
        (*env)->DeleteLocalRef(env, cls);
        return;
    }
    jstring paramUtf8 = (*env)->NewStringUTF(env, param);
    (*env)->CallVoidMethod(env, obj, mid, paramUtf8);
    /* Fix: the jstring local reference was never released; for callbacks
     * fired repeatedly from native code this exhausts the local ref table. */
    (*env)->DeleteLocalRef(env, paramUtf8);
    (*env)->DeleteLocalRef(env, cls);
}
/* Invoke the Java method methodName with signature (Ljava/lang/String;Z)V on
 * obj, converting param to a Java string and param2 to a jboolean. */
void parrot_java_callbacks_call_void_method_string_boolean(JNIEnv *env, jobject obj, const char*methodName, const char* param, bool_t param2)
{
    if (env == NULL || obj == NULL) {
        LOGW(TAG, "env or obj is null");
        return;
    }
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, methodName, "(Ljava/lang/String;Z)V");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        /* Fix: release the local class reference on the early-return path. */
        (*env)->DeleteLocalRef(env, cls);
        return;
    }
    jstring paramUtf8 = (*env)->NewStringUTF(env, param);
    jboolean boolJava = param2;
    (*env)->CallVoidMethod(env, obj, mid, paramUtf8, boolJava);
    /* Fix: release the jstring local reference after the call. */
    (*env)->DeleteLocalRef(env, paramUtf8);
    (*env)->DeleteLocalRef(env, cls);
}
/* Set the int instance field fieldName on obj to value. Silently skips the
 * write if the field does not exist (GetFieldID then returns NULL and leaves
 * a pending NoSuchFieldError in the JVM). */
void java_set_field_int(JNIEnv *env, jobject obj, const char* fieldName, jint value)
{
    /* Renamed local from `class` so this file can also be compiled as C++. */
    jclass cls = (*env)->GetObjectClass(env, obj);
    jfieldID fieldId = (*env)->GetFieldID(env, cls, fieldName, "I");
    /* Fix: calling SetIntField with a NULL fieldId is undefined behavior;
     * guard the write when the field lookup failed. */
    if (fieldId != NULL) {
        (*env)->SetIntField(env, obj, fieldId, value);
    }
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, cls);
}
/* Set the boolean instance field fieldName on obj to value. Silently skips
 * the write if the field does not exist. */
void java_set_field_bool(JNIEnv *env, jobject obj, const char* fieldName, jboolean value)
{
    /* Renamed local from `class` so this file can also be compiled as C++. */
    jclass cls = (*env)->GetObjectClass(env, obj);
    jfieldID fieldId = (*env)->GetFieldID(env, cls, fieldName, "Z");
    /* Fix: guard against a NULL fieldId from a failed lookup (UB otherwise). */
    if (fieldId != NULL) {
        (*env)->SetBooleanField(env, obj, fieldId, value);
    }
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, cls);
}
/* Read and return the boolean instance field fieldName from obj.
 * Returns JNI_FALSE if the field does not exist on obj's class. */
jboolean java_get_bool_field_value(JNIEnv *env, jobject obj, const char* fieldName)
{
    /* Renamed local from `class` so this file can also be compiled as C++. */
    jclass cls = (*env)->GetObjectClass(env, obj);
    jfieldID fieldId = (*env)->GetFieldID(env, cls, fieldName, "Z");
    /* Fix: guard against a NULL fieldId from a failed lookup (UB otherwise). */
    jboolean value = JNI_FALSE;
    if (fieldId != NULL) {
        value = (*env)->GetBooleanField(env, obj, fieldId);
    }
    (*env)->DeleteLocalRef(env, cls);
    return value;
}
/*
* java_callbacks.h
*
* Created on: Jan 31, 2012
* Author: "Dmytro Baryskyy"
*/
#ifndef JAVA_CALLBACKS_H_
#define JAVA_CALLBACKS_H_
extern void parrot_java_callbacks_call_void_method(JNIEnv *env, jobject obj, const char* methodName);
extern void parrot_java_callbacks_call_void_method_int_int(jobject obj, const char* methodName, int param1, int param2);
extern void parrot_java_callbacks_call_void_method_string(JNIEnv* env, jobject obj, const char*methodName, const char* param);
extern void java_set_field_int(JNIEnv *env, jobject obj, const char* fieldName, jint value);
extern void java_set_field_bool(JNIEnv *env, jobject obj, const char* fieldName, jboolean value);
extern jboolean java_get_bool_field_value(JNIEnv *env, jobject obj, const char* fieldName);
#endif /* JAVA_CALLBACKS_H_ */
/*
* ControlData.m
* ARDroneEngine
*
* Created by Frederic D'HAEYER on 14/01/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#include "common.h"
#include <ardrone_tool/Navdata/ardrone_academy_navdata.h>
#include <VLIB/video_codec.h>
#include "ControlData.h"
//#define DEBUG_CONTROL
ControlData ctrldata;
static const char* TAG = "ControlData";
/* Populate ardrone_application_default_config with settings matched to this
 * device's video capabilities, then select the wifi record codec from the
 * recording capability stored in ctrldata. Must run before the tool pushes
 * the default configuration to the drone. */
void setApplicationDefaultConfig()
{
    videoCapabilities vCaps = getDeviceVideoCapabilites ();
#ifdef DEBUG_CONTROL
    printDeviceInfos();
#endif
    /* Request the reduced "demo" navdata stream plus the option tags this
     * app consumes (vision detect, games, magneto, HD video stream, wifi). */
    ardrone_application_default_config.navdata_demo = TRUE;
    ardrone_application_default_config.navdata_options = (NAVDATA_OPTION_MASK(NAVDATA_DEMO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_VISION_DETECT_TAG) | NAVDATA_OPTION_MASK(NAVDATA_GAMES_TAG) | NAVDATA_OPTION_MASK(NAVDATA_MAGNETO_TAG) | NAVDATA_OPTION_MASK(NAVDATA_HDVIDEO_STREAM_TAG) | NAVDATA_OPTION_MASK(NAVDATA_WIFI_TAG));
    if (IS_ARDRONE2)
    {
        /* AR.Drone 2.x: derive fps, bitrate cap and codec from the device's
         * capability table (vCapsInfo indexed by the probed capability). */
        ardrone_application_default_config.codec_fps = vCapsInfo[vCaps].supportedFps;
        ardrone_application_default_config.max_bitrate = vCapsInfo[vCaps].supportedBitrate;
        ardrone_application_default_config.video_codec = vCapsInfo[vCaps].defaultCodec;
#ifdef DEBUG_CONTROL
        LOGD ("CONTROL_DATA", "Device support : %d fps @ %d kbps - codec value : 0x%02x\n", vCapsInfo[vCaps].supportedFps, vCapsInfo[vCaps].supportedBitrate, vCapsInfo[vCaps].defaultCodec);
#endif
        ardrone_application_default_config.bitrate_ctrl_mode = ARDRONE_VARIABLE_BITRATE_MODE_DYNAMIC;
    } else {
        /* AR.Drone 1.x path: fixed P264 codec with dynamic bitrate control. */
        ardrone_application_default_config.video_codec = P264_CODEC;
        ardrone_application_default_config.bitrate_ctrl_mode = ARDRONE_VARIABLE_BITRATE_MODE_DYNAMIC;
    }
    /* Pick the wifi record codec from the device's recording capability.
     * NOTE(review): the 720p case uses VIDEO_CAPABILITIES_720 while the other
     * labels are VIDEO_RECORDING_* — looks inconsistent; confirm against the
     * enum declaring recordingCapability. */
    switch (ctrldata.recordingCapability) {
        case VIDEO_RECORDING_NOT_SUPPORTED:
            ardrone_academy_navdata_set_wifi_record_codec(NULL_CODEC);
            break;
        case VIDEO_RECORDING_360P:
            ardrone_academy_navdata_set_wifi_record_codec(MP4_360P_H264_360P_CODEC);
            break;
        case VIDEO_CAPABILITIES_720:
            ardrone_academy_navdata_set_wifi_record_codec(MP4_360P_H264_720P_CODEC);
            break;
    }
    LOGD(TAG, "setApplicationDefaultConfig [OK]");
}
/* One-time setup of the shared control state: clears queued animations and the
 * pending video switch, zeroes the stick inputs, and resets the input layer. */
void initControlData(void)
{
    ctrldata.framecounter = 0;
    ctrldata.needVideoSwitch = -1;

    ctrldata.needAnimation = FALSE;
    vp_os_memset(ctrldata.needAnimationParam, 0, sizeof(ctrldata.needAnimationParam));

    ctrldata.needLedAnimation = FALSE;
    vp_os_memset(ctrldata.needLedAnimationParam, 0, sizeof(ctrldata.needLedAnimationParam));

    resetControlData();
    ardrone_tool_input_start_reset();

    // navdata_write_to_file(FALSE);
    ctrldata.navdata_connected = FALSE;
    LOGD(TAG, "initControlData [OK]");
}
/* Zeroes every live pilot input: the command flag word, all four stick axes,
 * and the device-heading (psi) hints. */
void resetControlData(void)
{
    ctrldata.command_flag = 0;
    ctrldata.iphone_psi = 0;
    ctrldata.iphone_psi_accuracy = 0;

    inputRoll(0.0);
    inputPitch(0.0);
    inputYaw(0.0);
    inputGaz(0.0);
}
/* Stores the yaw-rate command, saturated into [-1.0, 1.0]. Values outside the
 * range (and NaN, via the final else) collapse onto a bound. */
void inputYaw(float percent)
{
#ifdef DEBUG_CONTROL
    LOGD ("CONTROL_DATA", "%s : %f\n", __FUNCTION__, percent);
#endif
    float clamped;
    if (-1.0f <= percent && percent <= 1.0f)
        clamped = percent;
    else if (percent < -1.0f)
        clamped = -1.0f;
    else
        clamped = 1.0f;
    ctrldata.yaw = clamped;
}
/* Stores the vertical-speed (gaz) command, saturated into [-1.0, 1.0]. */
void inputGaz(float percent)
{
#ifdef DEBUG_CONTROL
    LOGD ("CONTROL_DATA", "%s : %f\n", __FUNCTION__, percent);
#endif
    float clamped;
    if (-1.0f <= percent && percent <= 1.0f)
        clamped = percent;
    else if (percent < -1.0f)
        clamped = -1.0f;
    else
        clamped = 1.0f;
    ctrldata.gaz = clamped;
}
/* Stores the pitch (theta) command, saturated into [-1.0, 1.0]. */
void inputPitch(float percent)
{
#ifdef DEBUG_CONTROL
    LOGD ("CONTROL_DATA", "%s : %f, accelero_enable : %d\n", __FUNCTION__, percent, (ctrldata.command_flag >> ARDRONE_PROGRESSIVE_CMD_ENABLE) & 0x1 );
#endif
    float clamped;
    if (-1.0f <= percent && percent <= 1.0f)
        clamped = percent;
    else if (percent < -1.0f)
        clamped = -1.0f;
    else
        clamped = 1.0f;
    ctrldata.iphone_theta = clamped;
#ifdef DEBUG_CONTROL
    LOGD ("CONTROL_DATA", "%s : %f, accelero_enable : %d\n", __FUNCTION__, ctrldata.iphone_theta, (ctrldata.command_flag >> ARDRONE_PROGRESSIVE_CMD_ENABLE) & 0x1 );
#endif
}
/* Stores the roll (phi) command, saturated into [-1.0, 1.0]. */
void inputRoll(float percent)
{
#ifdef DEBUG_CONTROL
    LOGD ("CONTROL_DATA", "%s : %f, accelero_enable : %d\n", __FUNCTION__, percent, (ctrldata.command_flag >> ARDRONE_PROGRESSIVE_CMD_ENABLE) & 0x1);
#endif
    float clamped;
    if (-1.0f <= percent && percent <= 1.0f)
        clamped = percent;
    else if (percent < -1.0f)
        clamped = -1.0f;
    else
        clamped = 1.0f;
    ctrldata.iphone_phi = clamped;
#ifdef DEBUG_CONTROL
    LOGD ("CONTROL_DATA", "%s : %f, accelero_enable : %d\n", __FUNCTION__, ctrldata.iphone_phi, (ctrldata.command_flag >> ARDRONE_PROGRESSIVE_CMD_ENABLE) & 0x1 );
    LOGD ("CONTROL_DATA", "ARDRONE_PROGRESSIVE_CMD_ENABLE: %d ARDRONE_PROGRESSIVE_CMD_COMBINED_YAW_ACTIVE : %d\ ", (ctrldata.command_flag >> ARDRONE_PROGRESSIVE_CMD_ENABLE) & 0x1 , (ctrldata.command_flag >> ARDRONE_PROGRESSIVE_CMD_COMBINED_YAW_ACTIVE) & 0x1 );
#endif
}
/* Sets (enabled == TRUE) or clears bit `flag` in the shared progressive
 * command flag word sent with every control frame. */
void set_command_flag(int flag, bool_t enabled)
{
    const int32_t mask = (1 << flag);
    if (enabled == TRUE)
        ctrldata.command_flag |= mask;
    else
        ctrldata.command_flag &= ~mask;
}
/* Called from the virtual-gamepad update loop: pushes the accumulated
 * progressive command (flags, roll, pitch, gaz, yaw, heading + accuracy)
 * to the drone. */
void sendControls(void)
{
    ardrone_tool_set_progressive_cmd(ctrldata.command_flag, ctrldata.iphone_phi, ctrldata.iphone_theta, ctrldata.gaz, ctrldata.yaw, ctrldata.iphone_psi, ctrldata.iphone_psi_accuracy);
}
/*
* ControlData.h
* ARDroneEngine
*
* Created by Frederic D'HAEYER on 14/01/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#ifndef _CONTROLDATA_H_
#define _CONTROLDATA_H_
#include "common.h"
#define SMALL_STRING_SIZE 16
#define MEDIUM_STRING_SIZE 64
/* Lifecycle of an asynchronous configuration exchange with the drone. */
typedef enum _CONFIG_STATE_
{
    CONFIG_STATE_IDLE,
    CONFIG_STATE_NEEDED,
    CONFIG_STATE_IN_PROGRESS,
} CONFIG_STATE;
/* GPS fix forwarded to the drone (presumably degrees / meters — confirm units). */
typedef struct
{
    float64_t latitude;
    float64_t longitude;
    float64_t altitude;
} gps_info_t;
/* Aggregated pilot input and UI state shared between the JNI stubs and the
 * control loop. */
typedef struct
{
    /**
     * Progressive command values, each clamped to [-1.0, 1.0]: yaw and gaz
     * plus the device-tilt (phi/theta) and device-heading (psi) inputs.
     */
    float yaw, gaz, iphone_phi, iphone_theta, iphone_psi, iphone_psi_accuracy;
    int32_t command_flag;                          // bit field of ARDRONE_*_CMD_* flags
    int framecounter;
    int needVideoSwitch;                           // -1 when no switch is pending
    bool_t needAnimation;
    char needAnimationParam[SMALL_STRING_SIZE];
    bool_t needLedAnimation;
    char needLedAnimationParam[SMALL_STRING_SIZE];
    bool_t navdata_connected;
    VIDEO_RECORDING_CAPABILITY recordingCapability;
} ControlData;
void initControlData(void);
void resetControlData(void);
void setApplicationDefaultConfig(void);
void setMagnetoEnabled(bool_t enabled);
void set_command_flag(int flag, bool_t enabled);
void getConfigSuccess(bool_t result);
/* The input* setters clamp their argument into [-1.0, 1.0]. */
void inputYaw(float percent);
void inputGaz(float percent);
void inputPitch(float percent);
void inputRoll(float percent);
void sendControls(void);
#endif // _CONTROLDATA_H_
/*
* ardrone_controller.c
*
* Created on: Apr 10, 2011
* Author: Dmytro Baryskyy
*/
// Ardrone Library
#include <ardrone_api.h>
#include "common.h"
#include "ControlData.h"
#include "../NavData/nav_data.h"
#include "virtual_gamepad.h"
#include "ARDroneGeneratedTypes.h"
#include "ardrone_controller.h"
static const char* TAG = "ARDRONE_CONTROLLER";
// Local helper methods
static bool_t get_state_from_mask(uint32_t state, CTRL_STATES value);
static bool_t is_power_param_valid(float32_t power);
/* Currently selected video channel, cycled by parrot_ardrone_ctrl_switch_camera(). */
static int channel = ARDRONE_VIDEO_CHANNEL_FIRST;
/* Shared pilot-input state defined in ControlData.c. */
extern ControlData ctrldata;
/* Thin wrappers exposing the ARDroneLib academy/input helpers to the JNI layer. */
void parrot_ardrone_ctrl_take_off()
{
    ardrone_academy_navdata_takeoff();
}
void parrot_ardrone_ctrl_emergency()
{
    ardrone_academy_navdata_emergency();
}
/* Sets or clears one bit of the shared progressive command flag word. */
void parrot_ardrone_ctrl_set_command_flag(int32_t control_mode, bool_t enable)
{
    set_command_flag(control_mode, enable);
}
/* Stick inputs, forwarded to the clamped setters in ControlData.c. */
void parrot_ardrone_ctrl_set_yaw(float32_t percent)
{
    inputYaw(percent);
}
void parrot_ardrone_ctrl_set_gaz(float32_t percent)
{
    inputGaz(percent);
}
void parrot_ardrone_ctrl_set_roll(float32_t percent)
{
    inputRoll(percent);
}
void parrot_ardrone_ctrl_set_pitch(float32_t percent)
{
    inputPitch(percent);
}
/* Stubbed out: the navdata lookup is commented, so this always reports FALSE. */
bool_t parrot_ardrone_ctrl_has_ctrl_status(CTRL_STATES value)
{
    //return get_state_from_mask(instance_navdata.current_control_state, value);
    return FALSE;
}
//
/* Battery level getter. The navdata-backed implementation is disabled;
 * previously this non-void function fell off its end without a return
 * statement, which is undefined behavior in C (callers read garbage).
 * Return 0 explicitly until the lookup below is restored. */
uint32_t parrot_ardrone_ctrl_get_battery_level()
{
    // return instance_navdata.battery_level;
    return 0;
}
/* Altitude getter. The navdata lookup is disabled, so this always returns 0. */
uint32_t parrot_ardrone_ctrl_get_altitude()
{
    // altitude = instance_navdata.altitude;   (disabled navdata read)
    return 0;
}
/* Maximum yaw rate, in radians. Assumes ARDRONE_TOOL_CONFIGURATION_ADDEVENT
 * copies the value before this frame returns (a stack address is passed) —
 * TODO confirm against the macro. */
void parrot_ardrone_ctrl_set_yaw_max_angle(float32_t max_angle)
{
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_yaw, &max_angle, NULL);
}
/* Maximum pitch/roll angle, in radians. */
void parrot_ardrone_ctrl_set_tilt_max_angle(float32_t max_angle)
{
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(euler_angle_max, &max_angle, NULL);
}
/* Maximum vertical speed, in mm per second. */
void parrot_ardrone_ctrl_set_vert_speed_max(float32_t speed)
{
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_vz_max, &speed, NULL);
}
/* Advances to the next video channel, wrapping after ARDRONE_VIDEO_CHANNEL_LAST.
 * NOTE(review): because of the post-increment, the first call sends FIRST+1 —
 * the initial channel is never explicitly re-selected; confirm intended. */
void parrot_ardrone_ctrl_switch_camera(ardrone_tool_configuration_callback callback)
{
    if(channel++ == ARDRONE_VIDEO_CHANNEL_LAST)
        channel = ARDRONE_VIDEO_CHANNEL_FIRST;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_channel, (int32_t*)&channel, callback);
}
/* Calibrates the horizontal reference while the drone sits on a flat surface. */
void parrot_ardrone_ctrl_set_flat_trim()
{
    ardrone_at_set_flat_trim();
}
/* Getters read the locally cached drone configuration (ardrone_control_config),
 * refreshed by the CONFIGURATION GET cycle. */
float32_t parrot_ardrone_ctrl_get_yaw_max_angle()
{
    return ardrone_control_config.control_yaw;
}
float32_t parrot_ardrone_ctrl_get_tilt_max_angle()
{
    return ardrone_control_config.euler_angle_max;
}
/* Maximum vertical speed (mm/s) from the cached drone configuration.
 * (Removed a stray double semicolon on the return statement.) */
float32_t parrot_ardrone_ctrl_get_vert_speed_max()
{
    return ardrone_control_config.control_vz_max;
}
/* Cached `outdoor` flag from the last configuration fetch. */
bool_t parrot_ardrone_ctrl_is_outdoor_mode()
{
    return ardrone_control_config.outdoor;
}
/* Pushes the outdoor flag as a configuration event. The caller-supplied
 * `callback` was previously ignored (NULL was passed), so callers never
 * learned when the event completed — forward it instead.
 * NOTE(review): &outdoor is a stack address; assumes ADDEVENT copies the
 * value before returning — TODO confirm. */
void parrot_ardrone_ctrl_set_outdoor_mode(bool_t outdoor, ardrone_tool_configuration_callback callback)
{
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(outdoor, &outdoor, callback);
}
/* TRUE when the major control state (upper 16 bits of `state`) equals `value`. */
bool_t get_state_from_mask(uint32_t state, CTRL_STATES value)
{
    const uint32_t major_state = state >> 16;
    if (major_state == value)
        return TRUE;
    return FALSE;
}
/* Validates a normalized power command: TRUE iff power lies in [0.0, 1.0];
 * logs a warning and returns FALSE otherwise. */
bool_t is_power_param_valid(float32_t power)
{
    const bool_t in_range = (power >= 0.0f && power <= 1.0f) ? TRUE : FALSE;
    if (in_range == FALSE) {
        LOGW(TAG, "Invalid power parameter : %f", power);
    }
    return in_range;
}
/*
* ardrone_controller.h
*
* Created on: Apr 10, 2011
* Author: Dmytro Baryskyy
*/
#ifndef ARDRONE_CONTROLLER_H_
#define ARDRONE_CONTROLLER_H_
#include <control_states.h>
#include <ardrone_tool/UI/ardrone_input.h>
#include <ardrone_tool/ardrone_tool_configuration.h>
/* High-level drone commands. NOTE(review): _land and _emergency_clear are
 * declared here but not defined in the visible controller source — confirm
 * they exist elsewhere. */
void parrot_ardrone_ctrl_take_off();
void parrot_ardrone_ctrl_land();
void parrot_ardrone_ctrl_emergency();
void parrot_ardrone_ctrl_emergency_clear();
// Functions that used to set the options
void parrot_ardrone_ctrl_set_yaw_max_angle (float32_t /*max_angle*/); //in radians
void parrot_ardrone_ctrl_set_tilt_max_angle(float32_t /*max_angle*/); //in radians
void parrot_ardrone_ctrl_set_vert_speed_max(float32_t /*speed*/); //in mm per second
void parrot_ardrone_ctrl_set_flat_trim();
void parrot_ardrone_ctrl_set_outdoor_mode(bool_t /*outdoor*/, ardrone_tool_configuration_callback /*callback*/);
void parrot_ardrone_ctrl_set_command_flag(int32_t /*control_mode*/, bool_t /*enable*/);
// Functions that controls gaz, yaw, roll and pitch (all clamped to [-1.0, 1.0])
void parrot_ardrone_ctrl_set_yaw (float32_t /*yaw*/);
void parrot_ardrone_ctrl_set_gaz (float32_t /*gaz*/);
void parrot_ardrone_ctrl_set_roll (float32_t /*roll*/);
void parrot_ardrone_ctrl_set_pitch(float32_t /*pitch*/);
// Functions that control video
void parrot_ardrone_ctrl_switch_camera(ardrone_tool_configuration_callback /*callback*/);
bool_t parrot_ardrone_ctrl_has_ctrl_status(CTRL_STATES /*state*/);
uint32_t parrot_ardrone_ctrl_get_battery_level();
uint32_t parrot_ardrone_ctrl_get_altitude();
float32_t parrot_ardrone_ctrl_get_yaw_max_angle();
float32_t parrot_ardrone_ctrl_get_tilt_max_angle();
float32_t parrot_ardrone_ctrl_get_vert_speed_max();
bool_t parrot_ardrone_ctrl_is_outdoor_mode();
#endif /* ARDRONE_CONTROLLER_H_ */
/*
* virtual_gamepad.c
*
* Created on: May 13, 2011
* Author: Dmytro Baryskyy
*/
#include <VP_Os/vp_os_print.h>
#include <ardrone_tool/UI/ardrone_input.h>
#include "common.h"
#include "ControlData.h"
#include "virtual_gamepad.h"
// Defining callbacks for the virtual gamepad
/* Input device registered with ardrone_tool; its update callback drives the
 * control-command transmission. */
input_device_t virtual_gamepad = {
    "Virtual Gamepad",
    open_gamepad,
    update_gamepad,
    close_gamepad
};
static const char* TAG = "VIRTUAL_GAMEPAD";
// Will be called once
C_RESULT open_gamepad(void)
{
    LOGI (TAG, "GAMEPAD OPEN CALLED");
    return C_OK;
}
// Will be called approx 30 times per second
/* Each tick pushes the accumulated progressive command to the drone. */
C_RESULT update_gamepad(void)
{
    sendControls();
    return C_OK;
}
C_RESULT close_gamepad(void)
{
    LOGI (TAG, "GAMEPAD CLOSE CALLED");
    return C_OK;
}
/*
* virtual_gamepad.h
*
* Created on: May 13, 2011
* Author: Dmytro Baryskyy
*/
#ifndef VIRTUAL_GAMEPAD_H_
#define VIRTUAL_GAMEPAD_H_
#include <ardrone_tool/UI/ardrone_input.h>
// Setting virtual gamepad callbacks
extern input_device_t virtual_gamepad;
// Gamepad callbacks
/* NOTE(review): `static` prototypes in a header give every includer an
 * internal declaration with no definition — fine for virtual_gamepad.c
 * itself, but causes warnings elsewhere; consider moving them into the .c. */
static C_RESULT open_gamepad(void);
static C_RESULT update_gamepad(void);
static C_RESULT close_gamepad(void);
#endif /* VIRTUAL_GAMEPAD_H_ */
/*
* nav_data.c
*
* Created on: May 13, 2011
* Author: "Dmytro Baryskyy"
*/
// VP_Os Library
#include <VP_Os/vp_os_thread.h>
#include <VP_Os/vp_os_signal.h>
// ARDroneLib
#include "ARDroneTypes.h"
#include <control_states.h>
#include <ardrone_tool/Navdata/ardrone_navdata_file.h>
#include <ardrone_tool/Navdata/ardrone_navdata_client.h>
#include <ardrone_tool/UI/ardrone_input.h>
#include "common.h"
#include "nav_data.h"
static const char* TAG = "NAV_DATA";
/* Latest unpacked navdata snapshot, guarded by instance_navdata_mutex. */
navdata_unpacked_t inst_nav;
vp_os_mutex_t instance_navdata_mutex;
static bool_t bIsInitialized = FALSE;
/* Navdata pipeline init callback: creates the mutex and zeroes the snapshot. */
inline C_RESULT navdata_init( void* data )
{
    LOGD(TAG, "navdata_init");
    vp_os_mutex_init(&instance_navdata_mutex);
    vp_os_mutex_lock( &instance_navdata_mutex);
    navdata_reset(&inst_nav);
    bIsInitialized = TRUE;
    vp_os_mutex_unlock( &instance_navdata_mutex);
    return C_OK;
}
/* Called for every decoded navdata packet: copies it into the shared snapshot
 * under the mutex. Returns C_OK even before init so the pipeline keeps running. */
inline C_RESULT navdata_process( const navdata_unpacked_t* const navdata )
{
    if (bIsInitialized == FALSE) {
        LOGW(TAG, "Navdata is not initialized yet");
        return C_OK;
    }
    vp_os_mutex_lock( &instance_navdata_mutex);
    vp_os_memcpy(&inst_nav, navdata, sizeof(navdata_unpacked_t));
    vp_os_mutex_unlock( &instance_navdata_mutex );
    return C_OK;
}
/* Pipeline teardown: destroys the mutex and marks the module uninitialized. */
inline C_RESULT navdata_release( void )
{
    LOGI(TAG, "navdata_release");
    vp_os_mutex_destroy(&instance_navdata_mutex);
    bIsInitialized = FALSE;
    return C_OK;
}
/* Wipes *nav and marks the drone as being in bootstrap mode.
 * Returns C_FAIL when nav is NULL, C_OK otherwise. */
C_RESULT navdata_reset(navdata_unpacked_t *nav)
{
    if (nav == NULL)
        return C_FAIL;

    vp_os_memset(nav, 0x0, sizeof(navdata_unpacked_t));
    nav->ardrone_state |= ARDRONE_NAVDATA_BOOTSTRAP;
    return C_OK;
}
/* Thread-safe snapshot of the latest unpacked navdata into *data.
 * Returns C_FAIL when data is NULL, C_OK otherwise. */
C_RESULT navdata_get(navdata_unpacked_t *data)
{
    if (data == NULL)
        return C_FAIL;

    vp_os_mutex_lock( &instance_navdata_mutex );
    vp_os_memcpy(data, &inst_nav, sizeof(navdata_unpacked_t));
    vp_os_mutex_unlock( &instance_navdata_mutex );
    return C_OK;
}
/* Registers this module's callbacks with the ARDroneTool navdata pipeline. */
BEGIN_NAVDATA_HANDLER_TABLE
NAVDATA_HANDLER_TABLE_ENTRY(navdata_init, navdata_process, navdata_release, NULL)
END_NAVDATA_HANDLER_TABLE
/*
 * nav_data.h
 *
 * Created on: May 13, 2011
 * Author: "Dmytro Baryskyy"
 */
#ifndef NAV_DATA_H_
#define NAV_DATA_H_
//typedef struct _instance_navdata_t {
// uint32_t battery_level;
// uint32_t altitude;
// uint32_t prev_control_state;
// uint32_t current_control_state;
// uint32_t ardrone_state;
// uint32_t alert_state;
// uint32_t emergency_state;
// uint32_t num_frames;
// bool_t wifiReachable;
// bool_t flying;
//} instance_navdata_t;
//extern instance_navdata_t instance_navdata;
/* Zeroes *nav and flags ARDRONE_NAVDATA_BOOTSTRAP; C_FAIL on NULL. */
C_RESULT navdata_reset(navdata_unpacked_t *nav);
/* Mutex-protected copy of the latest navdata into *data; C_FAIL on NULL. */
C_RESULT navdata_get(navdata_unpacked_t *data);
/* NOTE(review): declared here but no definition is visible in this file. */
C_RESULT navdata_write_to_file(bool_t enable);
//extern C_RESULT parrot_ardrone_navdata_get(instance_navdata_t */*data*/);
//extern inline void parrot_ardrone_navdata_checkErrors();
#endif /* NAV_DATA_H_ */
/*
* plf.c
* ARUpdater
*
* Created by f.dhaeyer on 06/07/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#include <stdio.h>
#include <string.h>
#include "plf.h"
/** Get version numbers of plf
* @return -1 if error of file, else 0
*/
/* Reads the PLF file header of `plf_filename` into *header.
 * Returns 0 on success, -1 if the file cannot be opened or is too short. */
int
plf_get_header(const char *plf_filename, plf_phdr *header)
{
    plf_phdr tmp;
    int status = -1;
    FILE *fp;

    fp = fopen(plf_filename, "rb");
    if (fp == NULL)
        return status;

    printf("File %s opened\n", plf_filename);
    if (fread(&tmp, 1, sizeof(plf_phdr), fp) == sizeof(plf_phdr))
    {
        memcpy(header, &tmp, sizeof(plf_phdr));
        status = 0;
    }
    fclose(fp);
    return status;
}
/*
* plf.h
* ARUpdater
*
* Created by f.dhaeyer on 06/07/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#ifndef _PLF_H_
#define _PLF_H_
#define PLF_CURRENT_VERSION 10
#define PLF_HEADER_MAGIC 0x21464c50 //!< PLF magic number
typedef unsigned int Plf_Word; //!< Unsigned 32 bits integer
typedef unsigned short Plf_Half; //!< Unsigned 16 bits integer
/* NOTE(review): Plf_Add is a pointer typedef, so sizeof(plf_phdr) differs on
 * 64-bit builds while the on-disk format presumably assumes 32 bits — confirm
 * before reading headers with fread on 64-bit targets. */
typedef void* Plf_Add; //!< 32 bits address
//! PLF file header
typedef struct {
    Plf_Word p_magic; //!< PLF magic number
    Plf_Word p_plfversion; //!< PLF format version
    Plf_Word p_phdrsize; //!< File header size
    Plf_Word p_shdrsize; //!< Section header size
    Plf_Word p_type; //!< File type
    Plf_Add p_entry; //!< Executable entry point
    Plf_Word p_targ; //!< Target platform
    Plf_Word p_app; //!< Target application
    Plf_Word p_hdw; //!< Hardware compatibility
    Plf_Word p_ver; //!< Version
    Plf_Word p_edit; //!< Edition
    Plf_Word p_ext; //!< Extension
    Plf_Word p_lang; //!< Language zone
    Plf_Word p_size; //!< File size in bytes
} plf_phdr;
/* Reads the PLF header into *header; returns 0 on success, -1 on failure. */
int plf_get_header(const char *plf_filename, plf_phdr *header);
#endif // _PLF_H_
/*
* drone_confg_stub.c
*
* Created on: Jun 10, 2011
* Author: Dmytro Baryskyy
*/
#include "common.h"
#include "drone_config_stub.h"
/* Global ref slot for the Java DroneConfig object (unused in the visible code). */
static jobject configObj = NULL;
static const char* TAG = "DRONE_CONFIG_STUB";
/* Mirrors the Java `outdoorHull` boolean into flight_without_shell and pushes it. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateOutdoorHullNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Outdoor hull
    jfieldID outdoorHullFid = (*env)->GetFieldID(env, configCls, "outdoorHull", "Z");
    jboolean bOutdoorHull = (*env)->GetBooleanField(env, obj, outdoorHullFid);
    ardrone_control_config.flight_without_shell = bOutdoorHull;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(flight_without_shell, &ardrone_control_config.flight_without_shell, NULL);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Applies the Java `adaptiveVideo` flag to the bitrate control mode.
 * NOTE(review): the "off" mode differs per drone generation (MANUAL on v1,
 * MODE_DISABLED on v2) — confirm this asymmetry is intentional. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateAdaptiveVideoNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Adaptive video
    jfieldID adaptiveVideoFid = (*env)->GetFieldID(env, configCls, "adaptiveVideo", "Z");
    jboolean bAdaptiveVideo = (*env)->GetBooleanField(env, obj, adaptiveVideoFid);
    if (IS_ARDRONE1) {
        ARDRONE_VARIABLE_BITRATE enabled = (bAdaptiveVideo == TRUE) ? ARDRONE_VARIABLE_BITRATE_MODE_DYNAMIC : ARDRONE_VARIABLE_BITRATE_MANUAL;
        /* Manual fallback bitrate depends on the active codec. */
        uint32_t constantBitrate = (UVLC_CODEC == ardrone_control_config.video_codec) ? 20000 : 15000;
        ardrone_control_config.bitrate_ctrl_mode = enabled;
        ardrone_control_config.bitrate = constantBitrate;
        ARDRONE_TOOL_CONFIGURATION_ADDEVENT(bitrate_ctrl_mode, &ardrone_control_config.bitrate_ctrl_mode, NULL);
        ARDRONE_TOOL_CONFIGURATION_ADDEVENT(bitrate, &ardrone_control_config.bitrate, NULL);
    } else if (IS_ARDRONE2) {
        ARDRONE_VARIABLE_BITRATE enabled = (bAdaptiveVideo) ? ARDRONE_VARIABLE_BITRATE_MODE_DYNAMIC : ARDRONE_VARIABLE_BITRATE_MODE_DISABLED;
        ardrone_control_config.bitrate_ctrl_mode = enabled;
        ARDRONE_TOOL_CONFIGURATION_ADDEVENT(bitrate_ctrl_mode, &ardrone_control_config.bitrate_ctrl_mode, NULL);
    }
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Copies the Java `ownerMac` string into the drone configuration and pushes
 * it as an `owner_mac` configuration event. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateOwnerMacNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Owner Mac
    jfieldID pairingFid = (*env)->GetFieldID(env, configCls, "ownerMac", "Ljava/lang/String;");
    jstring strOwnerMac = (*env)->GetObjectField(env, obj, pairingFid);
    /* GetStringUTFChars returns const char*; the previous `const jbyte*`
     * declaration caused an incompatible-pointer warning on the strcpy below. */
    const char *owner_mac_arr = (*env)->GetStringUTFChars(env, strOwnerMac, NULL);
    if (owner_mac_arr == NULL) {
        /* OutOfMemoryError already thrown; drop the local ref before bailing. */
        (*env)->DeleteLocalRef(env, configCls);
        return;
    }
    /* NOTE(review): unbounded strcpy — assumes owner_mac can hold any MAC
     * string the Java layer produces; confirm the buffer size. */
    strcpy(ardrone_control_config.owner_mac, owner_mac_arr);
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(owner_mac, ardrone_control_config.owner_mac, NULL);
    (*env)->ReleaseStringUTFChars(env, strOwnerMac, owner_mac_arr);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Sets the altitude ceiling. `altitude` is presumably in meters — the config
 * field takes millimeters, hence the *1000 — TODO confirm against the caller. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateAltitudeLimit(JNIEnv *env, jobject obj, jint altitude)
{
    ardrone_control_config.altitude_max = altitude * 1000;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(altitude_max, &ardrone_control_config.altitude_max, NULL);
}
/* Mirrors the Java `outdoorFlight` boolean into the drone config and pushes it.
 * (Removed a stray double semicolon on the assignment.) */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateOutdoorFlightNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    jfieldID outdoorFlightFid = (*env)->GetFieldID(env, configCls, "outdoorFlight", "Z");
    ardrone_control_config.outdoor = (*env)->GetBooleanField(env, obj, outdoorFlightFid);
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(outdoor, &ardrone_control_config.outdoor, NULL);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Mirrors the Java `yawSpeedMax` (degrees) into control_yaw (radians). */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateYawSpeedMaxNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Yaw Speed Max
    jfieldID yawSpeedMaxFid = (*env)->GetFieldID(env, configCls, "yawSpeedMax", "I");
    jint yawSpeedMax = (*env)->GetIntField(env, obj, yawSpeedMaxFid);
    ardrone_control_config.control_yaw = (float)yawSpeedMax * DEG_TO_RAD;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_yaw, &ardrone_control_config.control_yaw, NULL);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Mirrors the Java `vertSpeedMax` into control_vz_max (no unit conversion). */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateVertSpeedMaxNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Vertical Speed Max
    jfieldID vertSpeedMaxFid = (*env)->GetFieldID(env, configCls, "vertSpeedMax", "I");
    jint vertSpeedMax = (*env)->GetIntField(env, obj,vertSpeedMaxFid);
    ardrone_control_config.control_vz_max = vertSpeedMax;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_vz_max, &ardrone_control_config.control_vz_max, NULL);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Mirrors the Java `tilt` (degrees) into euler_angle_max (radians). */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateTiltNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Tilt
    jfieldID tiltFid = (*env)->GetFieldID(env, configCls, "tilt", "I");
    jint tiltMax = (*env)->GetIntField(env, obj, tiltFid);
    ardrone_control_config.euler_angle_max = (float)tiltMax * DEG_TO_RAD;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(euler_angle_max, &ardrone_control_config.euler_angle_max, NULL);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Sets the handset-tilt limit (degrees in, radians stored). */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateDeviceTiltMax(JNIEnv *env, jobject obj, jint tilt)
{
    ardrone_control_config.control_iphone_tilt = (float)tilt * DEG_TO_RAD;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_iphone_tilt, &ardrone_control_config.control_iphone_tilt, NULL);
}
/* Copies the Java `networkName` string into ssid_single_player and pushes it. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateNetworkNameNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Network name
    jfieldID networkNameFid = (*env)->GetFieldID(env, configCls, "networkName", "Ljava/lang/String;");
    jstring strNetworkName = (*env)->GetObjectField(env, obj, networkNameFid);
    /* GetStringUTFChars returns const char*; the previous `const jbyte*`
     * declaration caused an incompatible-pointer warning. */
    const char *network_name_str = (*env)->GetStringUTFChars(env, strNetworkName, NULL);
    if (network_name_str == NULL) {
        /* OutOfMemoryError already thrown; drop the local ref before bailing. */
        (*env)->DeleteLocalRef(env, configCls);
        return;
    }
    /* NOTE(review): unbounded strcpy — assumes ssid_single_player can hold any
     * SSID the Java layer produces; confirm the buffer size. */
    strcpy(ardrone_control_config.ssid_single_player,network_name_str);
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(ssid_single_player, &ardrone_control_config.ssid_single_player, NULL);
    /* The original never released the UTF chars — this leaked on every call. */
    (*env)->ReleaseStringUTFChars(env, strNetworkName, network_name_str);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Applies the Java `videoCodec` selection, validating it per drone generation:
 * v1 accepts only P264/UVLC (and adjusts the manual bitrate accordingly),
 * v2 accepts the H.264 codec range. Unknown values are logged and ignored. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateVideoCodecNative(JNIEnv *env, jobject obj)
{
    jclass configCls = (*env)->GetObjectClass(env, obj);
    // Video codec
    jfieldID videoCodecFid = (*env)->GetFieldID(env, configCls, "videoCodec", "I");
    jint videoCodec = (*env)->GetIntField(env, obj, videoCodecFid);
    if (IS_ARDRONE1) {
        if (videoCodec == P264_CODEC || videoCodec == UVLC_CODEC) {
            LOGI(TAG, "Setting %s codec", (videoCodec == P264_CODEC?"P264":"UVLC"));
            ardrone_control_config.video_codec = videoCodec;
            ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_codec, &ardrone_control_config.video_codec, NULL);
            ardrone_control_config.bitrate = (UVLC_CODEC == ardrone_control_config.video_codec) ? 20000 : 15000;
            ARDRONE_TOOL_CONFIGURATION_ADDEVENT(bitrate, &ardrone_control_config.bitrate, NULL);
        } else {
            LOGW(TAG, "Can't set codec. Unknown codec %d", videoCodec);
        }
    } else if (IS_ARDRONE2) {
        if (videoCodec > UVLC_CODEC && videoCodec <= H264_AUTO_RESIZE_CODEC) {
            ardrone_control_config.video_codec = videoCodec;
            ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_codec, &ardrone_control_config.video_codec, NULL);
        } else {
            LOGW(TAG, "Can't set codec. Unknown codec %d", videoCodec);
        }
    }
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, configCls);
}
/* Toggles USB recording from the Java `recordOnUsb` field (AR.Drone 2 only). */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_updateRecordOnUsb(JNIEnv *env, jobject obj)
{
    if (IS_ARDRONE2) {
        ardrone_control_config.video_on_usb = java_get_bool_field_value(env, obj, "recordOnUsb");
        ARDRONE_TOOL_CONFIGURATION_ADDEVENT(video_on_usb, &ardrone_control_config.video_on_usb, NULL);
        LOGD(TAG, "Settings Video on USB to %d", ardrone_control_config.video_on_usb);
    } else {
        LOGW(TAG, "Can't set video on usb value for AR.Drone 1");
    }
}
/* Reports the detected drone generation to the Java layer. */
JNIEXPORT jint JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_getDroneFamily(JNIEnv *env, jobject obj)
{
    return ARDRONE_VERSION();
}
JNIEXPORT int JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_getFtpPortNative(JNIEnv *env, jclass class)
{
return FTP_PORT;
}
/* Returns the drone's IP address (WIFI_ARDRONE_IP) as a new Java string. */
JNIEXPORT jstring JNICALL
Java_com_parrot_freeflight_drone_DroneConfig_getDroneHostNative(JNIEnv *env, jclass class)
{
    jstring host = (*env)->NewStringUTF(env, WIFI_ARDRONE_IP);
    return host;
}
/*
 * drone_config_stub.h
 *
 * Created on: Jun 10, 2011
 * Author: "Dmytro Baryskyy"
 */
#ifndef DRONE_CONFIG_STUB_H_
#define DRONE_CONFIG_STUB_H_
/* NOTE(review): these constants are declared but no definition is visible in
 * this file — confirm they are defined elsewhere. */
extern const int ALTITUDE_LIMITED;
extern const int NO_ALTITUDE;
#endif /* DRONE_CONFIG_STUB_H_ */
/*
* drone_stub.c
*
* Created on: May 10, 2011
* Author: Dmytro Baryskyy
*/
// VP_SDK
#include <VP_Os/vp_os_print.h>
#include <VP_Api/vp_api_thread_helper.h>
// ARDroneLib
#include <ardrone_tool/UI/ardrone_input.h>
#include "common.h"
#include <math.h>
#include "../Controller/ardrone_controller.h"
#include "../NavData/nav_data.h"
#include "ControlData.h"
#include "app.h"
#include "../Video/video_stage_renderer.h"
#include "../Callbacks/drone_proxy_callbacks.h"
#include "../Stubs/drone_config_stub.h"
static const char* TAG = "DRONE_STUB";
/* Last connection error, published to the Java layer by the error-check path. */
int errorState;
extern ControlData ctrldata;
navdata_unpacked_t ctrlnavdata;
/* Global refs to the Java DroneConfig / DroneProxy objects. */
static jobject configObj = NULL;
static jobject droneProxyObj = NULL;
static gps_info_t gpsInfo = { 0 };
/* Pending-work state machines consumed by checkErrors(). */
static CONFIG_STATE gpsState = CONFIG_STATE_IDLE;
static CONFIG_STATE configurationState = CONFIG_STATE_IDLE;
static CONFIG_STATE prevConfigurationState = CONFIG_STATE_IDLE;
bool_t magnetoEnabled;
/* Translates native engine lifecycle messages into DroneProxy Java callbacks. */
void ardrone_engine_message_received(JNIEnv* env, jobject obj, ardrone_engine_message_t message)
{
    switch (message) {
    case ARDRONE_MESSAGE_CONNECTED_OK:
        LOGI(TAG, "Sending ARDRONE_MESSAGE_CONNECTED_OK");
        parrot_drone_proxy_onConnected(env, obj);
        break;
    case ARDRONE_MESSAGE_DISCONNECTED:
        LOGI(TAG, "Sending ARDRONE_MESSAGE_DISCONNECTED");
        parrot_drone_proxy_onDisconnected(env, obj);
        break;
    case ARDRONE_MESSAGE_ERR_NO_WIFI:
        LOGI(TAG, "Sending ARDRONE_MESSAGE_ERR_NO_WIFI");
        parrot_drone_proxy_onConnectionFailed(env, obj, ARDRONE_MESSAGE_ERR_NO_WIFI);
        break;
    case ARDRONE_MESSAGE_UNKNOWN_ERR:
        LOGI(TAG, "Sending ARDRONE_MESSAGE_UNKNOWN_ERR");
        parrot_drone_proxy_onConnectionFailed(env, obj, ARDRONE_MESSAGE_UNKNOWN_ERR);
        break;
    default:
        LOGW(TAG, "Unknown ardrone engine message: %d", message);
    }
}
/* Invoked from the academy (media) thread when a new media file is ready;
 * attaches the calling thread to the JVM for the duration of the callback.
 * NOTE(review): DetachCurrentThread is called unconditionally — if this were
 * ever invoked from an already-attached thread it would detach it; confirm it
 * only runs on native-only threads. */
void ardrone_academy_callback_called(const char *mediaPath, bool_t addToQueue)
{
    JNIEnv* env = NULL;
    if (g_vm) {
        (*g_vm)->AttachCurrentThread (g_vm, (JNIEnv **) &env, NULL);
    }
    if (env != NULL && droneProxyObj != NULL) {
        parrot_java_callbacks_call_void_method_string_boolean(env, droneProxyObj, "onAcademyNewMediaReady", mediaPath, addToQueue);
    } else {
        LOGW(TAG, "Academy callback. Can't get env");
    }
    if (g_vm) {
        (*g_vm)->DetachCurrentThread(g_vm);
    }
}
/* Resets the shared control state and clears a navdata snapshot.
 * NOTE(review): navdata_reset() operates on the local copy fetched via
 * navdata_get(); the shared navdata is left untouched — confirm intent.
 * (Fixed the "Nvdata" typo in the failure log message.) */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_initNavdata(JNIEnv *env, jobject obj)
{
    initControlData();
    navdata_unpacked_t navdata;
    navdata_get(&navdata);
    if (VP_SUCCEEDED(navdata_reset(&navdata)) == FALSE)
    {
        LOGW(TAG, "Navdata reset [FAILED]");
    } else
    {
        LOGD(TAG, "Navdata reset [OK]");
    }
}
/* Starts the native engine with the app/user identity and media directories.
 * Holds a global ref to the DroneProxy for later async callbacks.
 * NOTE(review): the GetStringUTFChars results are not NULL-checked, and the
 * chars are released immediately after notify_start — assumes notify_start
 * copies the strings before returning; confirm. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_connect(JNIEnv *env, jobject obj,
    jstring appName,
    jstring userName,
    jstring rootDir,
    jstring flightDir,
    jint flightStoringSize,
    jint recordingCapabilities)
{
    LOGI(TAG, "Connect called");
    droneProxyObj = (*env)->NewGlobalRef(env, obj);
    const char *str_app_name = (*env)->GetStringUTFChars(env, appName, NULL);
    const char *str_usr_name = (*env)->GetStringUTFChars(env, userName, NULL);
    const char *str_app_dir = (*env)->GetStringUTFChars(env, rootDir, NULL);
    const char *str_flight_dir = (*env)->GetStringUTFChars(env, flightDir, NULL);
    parrot_ardrone_notify_start(env, obj, ardrone_engine_message_received, str_app_name, str_usr_name, str_app_dir, str_flight_dir, flightStoringSize, ardrone_academy_callback_called, recordingCapabilities);
    (*env)->ReleaseStringUTFChars(env, appName, str_app_name);
    (*env)->ReleaseStringUTFChars(env, userName, str_usr_name);
    (*env)->ReleaseStringUTFChars(env, rootDir, str_app_dir);
    (*env)->ReleaseStringUTFChars(env, flightDir, str_flight_dir);
}
/* Lifecycle passthroughs to the native engine. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_pause(JNIEnv *env, jobject obj)
{
    LOGI(TAG, "Pause called");
    parrot_ardrone_notify_pause();
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_setDefaultConfigurationNative(JNIEnv *env, jobject obj)
{
    setApplicationDefaultConfig();
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_resume(JNIEnv *env, jobject obj)
{
    LOGI(TAG, "Resume called");
    parrot_ardrone_notify_resume();
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_disconnect(JNIEnv *env, jobject obj)
{
    LOGI(TAG, "Exit called");
    parrot_ardrone_notify_exit();
}
/* Flight triggers, forwarded to the controller layer. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_triggerTakeOff(JNIEnv *env, jobject obj)
{
    LOGI(TAG, "Trigger take off called");
    parrot_ardrone_ctrl_take_off();
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_triggerEmergency(JNIEnv *env, jobject obj)
{
    LOGI(TAG, "Trigger emergency called");
    parrot_ardrone_ctrl_emergency();
}
/* Dispatches a stick value to the matching axis setter (clamped downstream). */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_setControlValue(JNIEnv *env, jobject obj, jint command, jfloat value)
{
    switch (command)
    {
    case CONTROL_SET_GAZ:
        parrot_ardrone_ctrl_set_gaz(value);
        break;
    case CONTROL_SET_YAW:
        parrot_ardrone_ctrl_set_yaw(value);
        break;
    case CONTROL_SET_ROLL:
        parrot_ardrone_ctrl_set_roll(value);
        break;
    case CONTROL_SET_PITCH:
        parrot_ardrone_ctrl_set_pitch(value);
        break;
    default:
        LOGW(TAG, "Unknown control command %d", command);
    }
}
/* Records the magneto preference; the command flag itself is updated in
 * setDeviceOrientation based on heading accuracy. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_setMagnetoEnabled(JNIEnv *env, jobject obj, jboolean enabled)
{
    // setMagnetoEnabled(enabled);
    magnetoEnabled = enabled;
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_setCommandFlag(JNIEnv *env, jobject obj, jint flag, jboolean enable)
{
    set_command_flag(flag, enable);
}
/* Stores the device heading for magneto-assisted yaw. `heading` (degrees) is
 * mapped onto [-1.0, 1.0]: values above 180 wrap negative before dividing by
 * 180. The magneto command flag is raised only while the feature is enabled
 * and the compass reading has non-negative accuracy. */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_setDeviceOrientation(JNIEnv *env, jobject obj, jint heading, jint accuracy)
{
    float psi = heading;
    if (psi > 180)
        psi -= 360;
    ctrldata.iphone_psi = psi / 180;
    ctrldata.iphone_psi_accuracy = accuracy;

    bool_t use_magneto = (magnetoEnabled && ctrldata.iphone_psi_accuracy >= 0) ? TRUE : FALSE;
    set_command_flag(ARDRONE_MAGNETO_CMD_ENABLE, use_magneto);
}
/* Callback for ARDRONE_TOOL_CONFIGURATION_GET: on success the configuration
 * state machine returns to idle; on failure it stays in progress so the
 * fetch can be observed as incomplete. */
void getConfigSuccess(bool_t result)
{
    if (!result) {
        LOGD(TAG, "CONFIGURATION GET [FAIL]");
        return;
    }
    configurationState = CONFIG_STATE_IDLE;
    LOGD(TAG, "CONFIGURATION GET [OK]");
}
/* Callback for the GPS config events: returns the GPS state machine to idle
 * once the final event (altitude) is acknowledged. */
void gpsConfigSuccess(bool_t result)
{
    if(result)
        gpsState = CONFIG_STATE_IDLE;
}
/*
 * Inspects connection, navdata and configuration state and updates the
 * file-scope errorState accordingly.  Also drives two deferred state
 * machines: the configuration GET (configurationState) and the GPS userbox
 * update (gpsState).  Called from takeNavDataSnapshot() with a fresh copy
 * of the unpacked navdata.
 *
 * NOTE(review): ctrlnavdata is passed BY VALUE, so the navdata_reset()
 * calls below only clear this local copy -- the caller's structure is not
 * modified.  Confirm whether that is intentional before relying on it.
 */
void checkErrors(JNIEnv* env, navdata_unpacked_t ctrlnavdata)
{
input_state_t* input_state = ardrone_tool_input_get_state();
/* Fire a pending configuration fetch exactly once. */
if(configurationState == CONFIG_STATE_NEEDED)
{
configurationState = CONFIG_STATE_IN_PROGRESS;
ARDRONE_TOOL_CONFIGURATION_GET(getConfigSuccess);
LOGD(TAG, "CONFIGURATION GET [sent]");
}
/* On the IN_PROGRESS -> IDLE edge, notify the Java object registered via
 * triggerConfigUpdateNative() and drop our global reference to it. */
if (prevConfigurationState != configurationState && configurationState == CONFIG_STATE_IDLE)
{
if (configObj != NULL) {
parrot_drone_proxy_onConfigChanged(env, configObj);
(*env)->DeleteGlobalRef(env, configObj);
configObj = NULL;
LOGD(TAG, "OnConfigChanged sent [OK]");
}
}
prevConfigurationState = configurationState;
/* Push a pending GPS fix into the drone's userbox config (only after the
 * initial configuration has completed). */
if((gpsState == CONFIG_STATE_NEEDED) && configWasDone)
{
float64_t d_value;
gpsState = CONFIG_STATE_IN_PROGRESS;
d_value = gpsInfo.latitude;
LOGD(TAG, "Userbox latitude : %lf", d_value);
ARDRONE_TOOL_CONFIGURATION_ADDEVENT(latitude, &d_value, NULL);
d_value = gpsInfo.longitude;
LOGD(TAG, "Userbox longitude : %lf", d_value);
ARDRONE_TOOL_CONFIGURATION_ADDEVENT(longitude, &d_value, NULL);
d_value = gpsInfo.altitude;
LOGD(TAG, "Userbox altitude : %lf", d_value);
/* Only the last ADDEVENT carries the completion callback. */
ARDRONE_TOOL_CONFIGURATION_ADDEVENT(altitude, &d_value, gpsConfigSuccess);
ardrone_video_set_gps_infos(gpsInfo.latitude, gpsInfo.longitude, gpsInfo.altitude);
LOGD(TAG, "GPS location sent [OK]");
}
/* Error classification: recomputed from scratch on every call. */
errorState = ERROR_STATE_NONE;
if(ardrone_navdata_client_get_num_retries())
{
/* Navdata link is retrying => treat as disconnected. */
ctrldata.navdata_connected = FALSE;
errorState = ERROR_STATE_NAVDATA_CONNECTION;
resetControlData();
navdata_reset(&ctrlnavdata);
LOGD(TAG, "NAVDATA Reset [OK]");
}
else
{
ctrldata.navdata_connected = TRUE;
if(ardrone_academy_navdata_get_emergency_state())
{
/* Emergency: map the first matching ardrone_state bit to a specific
 * error code; order defines priority. */
if(ctrlnavdata.ardrone_state & ARDRONE_CUTOUT_MASK)
{
errorState = ERROR_STATE_EMERGENCY_CUTOUT;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_MOTORS_MASK)
{
errorState = ERROR_STATE_EMERGENCY_MOTORS;
}
else if(!(ctrlnavdata.ardrone_state & ARDRONE_VIDEO_THREAD_ON))
{
errorState = ERROR_STATE_EMERGENCY_CAMERA;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_ADC_WATCHDOG_MASK)
{
errorState = ERROR_STATE_EMERGENCY_PIC_WATCHDOG;
}
else if(!(ctrlnavdata.ardrone_state & ARDRONE_PIC_VERSION_MASK))
{
errorState = ERROR_STATE_EMERGENCY_PIC_VERSION;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_ANGLES_OUT_OF_RANGE)
{
errorState = ERROR_STATE_EMERGENCY_ANGLE_OUT_OF_RANGE;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_VBAT_LOW)
{
errorState = ERROR_STATE_EMERGENCY_VBAT_LOW;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_USER_EL)
{
errorState = ERROR_STATE_EMERGENCY_USER_EL;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_ULTRASOUND_MASK)
{
errorState = ERROR_STATE_EMERGENCY_ULTRASOUND;
}
else
{
errorState = ERROR_STATE_EMERGENCY_UNKNOWN;
}
/* If the drone ended up landed while in emergency, clear control data. */
FLYING_STATE currentFlyingState = ardrone_academy_navdata_get_flying_state (&ctrlnavdata);
if (FLYING_STATE_LANDED == currentFlyingState)
{
resetControlData();
navdata_reset(&ctrlnavdata);
}
}
else
{
/* Non-emergency alerts, again in priority order. */
if(video_stage_get_num_retries() > VIDEO_MAX_RETRIES)
{
errorState = ERROR_STATE_ALERT_CAMERA;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_VBAT_LOW)
{
errorState = ERROR_STATE_ALERT_VBAT_LOW;
}
else if(ctrlnavdata.ardrone_state & ARDRONE_ULTRASOUND_MASK)
{
errorState = ERROR_STATE_ALERT_ULTRASOUND;
}
else if(!(ctrlnavdata.ardrone_state & ARDRONE_VISION_MASK))
{
/* Vision loss only matters while actually flying. */
FLYING_STATE tmp_state = ardrone_academy_navdata_get_flying_state(&ctrlnavdata);
if(tmp_state == FLYING_STATE_FLYING)
{
errorState = ERROR_STATE_ALERT_VISION;
}
}
/* Start was requested but the drone never acknowledged take-off. */
if((input_state->user_input & (1 << ARDRONE_UI_BIT_START)) && !ardrone_academy_navdata_get_takeoff_state())
errorState = ERROR_STATE_START_NOT_RECEIVED;
}
}
}
/*
 * JNI entry point: copies the latest unpacked navdata into the NavData java
 * object passed in and returns it (unchanged reference).  Also runs
 * checkErrors() so the emergencyState field reflects the current error
 * classification.
 *
 * NOTE(review): ExceptionDescribe() prints but does not clear a pending
 * exception, so the early return below leaves it pending for the caller.
 */
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_takeNavDataSnapshot(JNIEnv *env, jobject obj, jobject navdataObj)
{
jclass navdataCls = (*env)->FindClass(env,"com/parrot/freeflight/drone/NavData");
if (navdataCls == NULL) {
if ((*env)->ExceptionOccurred(env)) {
(*env)->ExceptionDescribe(env);
}
LOGD(TAG, "Failed to get class com.parrot.freeflight.drone.NavData");
return navdataObj;
}
if (navdataObj != NULL) {
/*
 * Setting the data to the object
 */
// Getting current navdata
navdata_unpacked_t navdata;
if (VP_SUCCEEDED(navdata_get(&navdata))) {
// Refresh errorState before publishing it as emergencyState below.
checkErrors(env, navdata);
// Getting field ids
jfieldID batteryStatusFid = (*env)->GetFieldID(env, navdataCls, "batteryStatus", "I"); // "I" stands for "int" type
jfieldID flyingFid = (*env)->GetFieldID(env, navdataCls, "flying", "Z"); // "Z" stands for "boolean" type
jfieldID emergencyStateFid = (*env)->GetFieldID(env, navdataCls, "emergencyState", "I");
jfieldID numFramesFid = (*env)->GetFieldID(env, navdataCls, "numFrames", "I");
jfieldID initializedFid = (*env)->GetFieldID(env, navdataCls, "initialized", "Z");
// Filling the data into fields
(*env)->SetIntField(env, navdataObj, batteryStatusFid, navdata.navdata_demo.vbat_flying_percentage);
(*env)->SetIntField(env, navdataObj, emergencyStateFid, errorState);
(*env)->SetBooleanField(env, navdataObj, flyingFid, ardrone_academy_navdata_get_takeoff_state());
(*env)->SetIntField(env, navdataObj, numFramesFid, navdata.navdata_demo.num_frames);
(*env)->SetBooleanField(env, navdataObj, initializedFid, configWasDone);
java_set_field_bool(env, navdataObj, "recording", ardrone_academy_navdata_get_record_state());
java_set_field_int(env, navdataObj, "usbRemainingTime", ardrone_academy_navdata_get_remaining_usb_time());
java_set_field_bool(env, navdataObj, "usbActive", ardrone_academy_navdata_get_usb_state());
java_set_field_bool(env, navdataObj, "cameraReady", ardrone_academy_navdata_get_camera_state());
// NOTE(review): "recordReady" is the NEGATION of get_record_ready() --
// looks deliberate (the getter may report "currently recording"), but
// confirm against the academy navdata API before changing.
java_set_field_bool(env, navdataObj, "recordReady", !ardrone_academy_navdata_get_record_ready());
} else {
LOGW(TAG, "navdata_get [FAILED]");
}
} else {
LOGE(TAG, "takeNavDataSnapshot: Illegal input parameter navdataObj. configObj == NULL");
}
// Removing reference to the class instance
(*env)->DeleteLocalRef(env, navdataCls);
return navdataObj;
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_takeConfigSnapshot(JNIEnv *env, jobject obj, jobject configObj)
{
LOGI(TAG, "takeConfigSnapshot Called");
jclass configCls = (*env)->FindClass(env,"com/parrot/freeflight/drone/DroneConfig");
if (configCls == NULL) {
if ((*env)->ExceptionOccurred(env)) {
(*env)->ExceptionDescribe(env);
}
LOGE(TAG, "Failed to get class com.parrot.freeflight.drone.DroneConfig");
return configObj;
}
if (configObj != NULL) {
// Ar.Drone Software Version
jfieldID softVersionFid = (*env)->GetFieldID(env, configCls, "softwareVersion", "Ljava/lang/String;");
jstring strSoftVersion = (*env)->NewStringUTF(env, ardrone_control_config.num_version_soft);
(*env)->SetObjectField(env, configObj, softVersionFid, strSoftVersion);
(*env)->DeleteLocalRef(env, strSoftVersion);
// Ar.Drone Hardware Version
char droneHardVersion[256] = {0};
if (-1 == snprintf(droneHardVersion, 256*sizeof(char), "%x.%x", ardrone_control_config.num_version_mb >> 4, ardrone_control_config.num_version_mb & 0x0f)) {
LOGW(TAG, "Can't set drone hardware version");
}
jfieldID droneHardVersionFid = (*env)->GetFieldID(env, configCls, "hardwareVersion", "Ljava/lang/String;");
jstring strDroneHardVersion = (*env)->NewStringUTF(env, droneHardVersion);
(*env)->SetObjectField(env, configObj, droneHardVersionFid, strDroneHardVersion);
(*env)->DeleteLocalRef(env, strDroneHardVersion);
char hardVersion[256] = {0};
char softVersion[256] = {0};
if(ardrone_control_config.pic_version != 0)
{
uint32_t hard_major = (ardrone_control_config.pic_version >> 27) + 1;
uint32_t hard_minor = (ardrone_control_config.pic_version >> 24) & 0x7;
if (-1 == snprintf(hardVersion, 256*sizeof(char), "%x.%x", hard_major, hard_minor)) {
LOGW(TAG, "Can't set hard version");
}
if (-1 == snprintf(softVersion, 256*sizeof(char), "%d.%d", (int)((ardrone_control_config.pic_version & 0xFFFFFF) >> 16),(int)(ardrone_control_config.pic_version & 0xFFFF)))
{
LOGW(TAG, "Can't set soft version");
}
}
// Inertial software version
jfieldID inertialSoftVerFid = (*env)->GetFieldID(env, configCls, "inertialSoftwareVersion", "Ljava/lang/String;");
jstring strInertialSoftVersion = (*env)->NewStringUTF(env, softVersion);
(*env)->SetObjectField(env, configObj, inertialSoftVerFid, strInertialSoftVersion);
(*env)->DeleteLocalRef(env, strInertialSoftVersion);
// Inertial hardware version
jfieldID inertiaHardVerFid = (*env)->GetFieldID(env, configCls, "inertialHardwareVersion", "Ljava/lang/String;");
jstring strInertialHardVersion = (*env)->NewStringUTF(env, hardVersion);
(*env)->SetObjectField(env, configObj, inertiaHardVerFid, strInertialHardVersion);
(*env)->DeleteLocalRef(env, strInertialHardVersion);
// Motor 1 type
jfieldID motor1typeFid = (*env)->GetFieldID(env, configCls, "motor1Vendor", "Ljava/lang/String;");
jstring strMotor1Type = (*env)->NewStringUTF(env, ardrone_control_config.motor1_supplier);
(*env)->SetObjectField(env, configObj, motor1typeFid, strMotor1Type);
(*env)->DeleteLocalRef(env, strMotor1Type);
// Motor 2 type
jfieldID motor2typeFid = (*env)->GetFieldID(env, configCls, "motor2Vendor", "Ljava/lang/String;");
jstring strMotor2Type = (*env)->NewStringUTF(env, ardrone_control_config.motor2_supplier);
(*env)->SetObjectField(env, configObj, motor2typeFid, strMotor2Type);
(*env)->DeleteLocalRef(env, strMotor2Type);
// Motor 3 type
jfieldID motor3typeFid = (*env)->GetFieldID(env, configCls, "motor3Vendor", "Ljava/lang/String;");
jstring strMotor3Type = (*env)->NewStringUTF(env, ardrone_control_config.motor3_supplier);
(*env)->SetObjectField(env, configObj, motor3typeFid, strMotor3Type);
(*env)->DeleteLocalRef(env, strMotor3Type);
// Motor 4 type
jfieldID motor4typeFid = (*env)->GetFieldID(env, configCls, "motor4Vendor", "Ljava/lang/String;");
jstring strMotor4Type = (*env)->NewStringUTF(env, ardrone_control_config.motor4_supplier);
(*env)->SetObjectField(env, configObj, motor4typeFid, strMotor4Type);
(*env)->DeleteLocalRef(env, strMotor4Type);;
// Motor 1 hardware version
jfieldID motor1HardVersionFid = (*env)->GetFieldID(env, configCls, "motor1HardVersion", "Ljava/lang/String;");
jstring strMotor1Hard = (*env)->NewStringUTF(env, ardrone_control_config.motor1_hard);
(*env)->SetObjectField(env, configObj, motor1HardVersionFid, strMotor1Hard);
(*env)->DeleteLocalRef(env, strMotor1Hard);
// Motor 2 hardware version
jfieldID motor2HardVersionFid = (*env)->GetFieldID(env, configCls, "motor2HardVersion", "Ljava/lang/String;");
jstring strMotor2Hard = (*env)->NewStringUTF(env, ardrone_control_config.motor2_hard);
(*env)->SetObjectField(env, configObj, motor2HardVersionFid, strMotor2Hard);
(*env)->DeleteLocalRef(env, strMotor2Hard);
// Motor 3 hardware version
jfieldID motor3HardVersionFid = (*env)->GetFieldID(env, configCls, "motor3HardVersion", "Ljava/lang/String;");
jstring strMotor3Hard = (*env)->NewStringUTF(env, ardrone_control_config.motor3_hard);
(*env)->SetObjectField(env, configObj, motor3HardVersionFid, strMotor3Hard);
(*env)->DeleteLocalRef(env, strMotor3Hard);
// Motor 4 hardware version
jfieldID motor4HardVersionFid = (*env)->GetFieldID(env, configCls, "motor4HardVersion", "Ljava/lang/String;");
jstring strMotor4Hard = (*env)->NewStringUTF(env, ardrone_control_config.motor4_hard);
(*env)->SetObjectField(env, configObj, motor4HardVersionFid, strMotor4Hard);
(*env)->DeleteLocalRef(env, strMotor4Hard);
// Motor 1 software version
jfieldID motor1SoftVersionFid = (*env)->GetFieldID(env, configCls, "motor1SoftVersion", "Ljava/lang/String;");
jstring strMotor1Soft = (*env)->NewStringUTF(env, ardrone_control_config.motor1_soft);
(*env)->SetObjectField(env, configObj, motor1SoftVersionFid, strMotor1Soft);
(*env)->DeleteLocalRef(env, strMotor1Soft);
// Motor 2 software version
jfieldID motor2SoftVersionFid = (*env)->GetFieldID(env, configCls, "motor2SoftVersion", "Ljava/lang/String;");
jstring strMotor2Soft = (*env)->NewStringUTF(env, ardrone_control_config.motor2_soft);
(*env)->SetObjectField(env, configObj, motor2SoftVersionFid, strMotor2Soft);
(*env)->DeleteLocalRef(env, strMotor2Soft);
// Motor 3 software version
jfieldID motor3SoftVersionFid = (*env)->GetFieldID(env, configCls, "motor3SoftVersion", "Ljava/lang/String;");
jstring strMotor3Soft = (*env)->NewStringUTF(env, ardrone_control_config.motor3_soft);
(*env)->SetObjectField(env, configObj, motor3SoftVersionFid, strMotor3Soft);
(*env)->DeleteLocalRef(env, strMotor3Soft);
// Motor 4 software version
jfieldID motor4SoftVersionFid = (*env)->GetFieldID(env, configCls, "motor4SoftVersion", "Ljava/lang/String;");
jstring strMotor4Soft = (*env)->NewStringUTF(env, ardrone_control_config.motor4_soft);
(*env)->SetObjectField(env, configObj, motor4SoftVersionFid, strMotor4Soft);
(*env)->DeleteLocalRef(env, strMotor4Soft);
// Network name
jfieldID networkNameFid = (*env)->GetFieldID(env, configCls, "networkName", "Ljava/lang/String;");
jstring strNetworkName = (*env)->NewStringUTF(env, ardrone_control_config.ssid_single_player);
(*env)->SetObjectField(env, configObj, networkNameFid, strNetworkName);
(*env)->DeleteLocalRef(env, strNetworkName);
// Pairing
jfieldID ownerMacFid = (*env)->GetFieldID(env, configCls, "ownerMac", "Ljava/lang/String;");
jstring strOwnerMac = (*env)->NewStringUTF(env, ardrone_control_config.owner_mac);
(*env)->SetObjectField(env, configObj, ownerMacFid, strOwnerMac);
(*env)->DeleteLocalRef(env, strOwnerMac);
// Altitude Limit
jfieldID altitudeLimitedFid = (*env)->GetFieldID(env, configCls, "altitudeLimit", "I");
(*env)->SetIntField(env, configObj, altitudeLimitedFid, ardrone_control_config.altitude_max / 1000);
// Adaptive video
jfieldID adaptiveVideoFid = (*env)->GetFieldID(env, configCls, "adaptiveVideo", "Z");
(*env)->SetBooleanField(env, configObj, adaptiveVideoFid,
(ARDRONE_VARIABLE_BITRATE_MODE_DYNAMIC == ardrone_control_config.bitrate_ctrl_mode) ? TRUE : FALSE);
// Video codec
jfieldID videoCodecFid = (*env)->GetFieldID(env, configCls, "videoCodec", "I");
(*env)->SetIntField(env, configObj, videoCodecFid, (jint) (ardrone_control_config.video_codec));
//(*env)->SetIntField(env, configObj, videoCodecFid, (jint) (ardrone_control_config.codec));
// Outdoor hull
jfieldID outdoorHullFid = (*env)->GetFieldID(env, configCls, "outdoorHull", "Z");
(*env)->SetBooleanField(env, configObj, outdoorHullFid, ardrone_control_config.flight_without_shell ? TRUE : FALSE);
// Outdoor flight
jfieldID outdoorFlightFid = (*env)->GetFieldID(env, configCls, "outdoorFlight", "Z");
(*env)->SetBooleanField(env, configObj, outdoorFlightFid, ardrone_control_config.outdoor ? TRUE : FALSE);
// Yaw speed max
jfieldID yawSpeedMaxFid = (*env)->GetFieldID(env, configCls, "yawSpeedMax", "I");
(*env)->SetIntField(env, configObj, yawSpeedMaxFid, (jint)((float)round(ardrone_control_config.control_yaw * RAD_TO_DEG)));
// Vertical speed max
jfieldID vertSpeedMaxFid = (*env)->GetFieldID(env, configCls, "vertSpeedMax", "I");
(*env)->SetIntField(env, configObj, vertSpeedMaxFid, (jint)(ardrone_control_config.control_vz_max));
// Tilt
jfieldID tiltFid = (*env)->GetFieldID(env, configCls, "tilt", "I");
(*env)->SetIntField(env, configObj, tiltFid, (jint)((float)round(ardrone_control_config.euler_angle_max * RAD_TO_DEG)));
// Device tilt
jfieldID devceTiltFid = (*env)->GetFieldID(env, configCls, "deviceTiltMax", "I");
(*env)->SetIntField(env, configObj, devceTiltFid, (jint)((float)round(ardrone_control_config.control_iphone_tilt * RAD_TO_DEG)));
// Video record on USB
jfieldID recordOnUsbFid = (*env)->GetFieldID(env, configCls, "recordOnUsb", "Z");
(*env)->SetBooleanField(env, configObj, recordOnUsbFid, ardrone_control_config.video_on_usb);
} else {
LOGE(TAG, "takeConfigSnapshot: Illegal input parameter configObj. configObj == NULL");
}
// Removing reference to the class instance
(*env)->DeleteLocalRef(env, configCls);
return configObj;
}
/*
 * JNI entry point: restores the pilot-adjustable settings (angle, vertical
 * speed, yaw limits, indoor/outdoor mode, hull flag, altitude limit) to the
 * application defaults and queues one ADDEVENT per setting so the drone is
 * updated.  Fix: the original re-assigned outdoor_euler_angle_max and
 * re-sent the euler_angle_max/control_vz_max/control_yaw events a second
 * time; the redundant duplicate block was removed (final state unchanged).
 */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_resetConfigToDefaults(JNIEnv *env, jobject obj)
{
    // Indoor limits
    ardrone_control_config.indoor_euler_angle_max = ardrone_application_default_config.indoor_euler_angle_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(indoor_euler_angle_max, &ardrone_control_config.indoor_euler_angle_max, NULL);
    ardrone_control_config.indoor_control_vz_max = ardrone_application_default_config.indoor_control_vz_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(indoor_control_vz_max, &ardrone_control_config.indoor_control_vz_max, NULL);
    ardrone_control_config.indoor_control_yaw = ardrone_application_default_config.indoor_control_yaw;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(indoor_control_yaw, &ardrone_control_config.indoor_control_yaw, NULL);
    // Outdoor limits
    ardrone_control_config.outdoor_euler_angle_max = ardrone_application_default_config.outdoor_euler_angle_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(outdoor_euler_angle_max, &ardrone_control_config.outdoor_euler_angle_max, NULL);
    ardrone_control_config.outdoor_control_vz_max = ardrone_application_default_config.outdoor_control_vz_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(outdoor_control_vz_max, &ardrone_control_config.outdoor_control_vz_max, NULL);
    ardrone_control_config.outdoor_control_yaw = ardrone_application_default_config.outdoor_control_yaw;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(outdoor_control_yaw, &ardrone_control_config.outdoor_control_yaw, NULL);
    // Indoor/outdoor mode flag
    ardrone_control_config.outdoor = ardrone_application_default_config.outdoor;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(outdoor, &ardrone_control_config.outdoor, NULL);
    // Currently active limits
    ardrone_control_config.euler_angle_max = ardrone_application_default_config.euler_angle_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(euler_angle_max, &ardrone_control_config.euler_angle_max, NULL);
    ardrone_control_config.control_vz_max = ardrone_application_default_config.control_vz_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_vz_max, &ardrone_control_config.control_vz_max, NULL);
    ardrone_control_config.control_yaw = ardrone_application_default_config.control_yaw;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_yaw, &ardrone_control_config.control_yaw, NULL);
    // Device tilt, hull flag, altitude ceiling
    ardrone_control_config.control_iphone_tilt = ardrone_application_default_config.control_iphone_tilt;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(control_iphone_tilt, &ardrone_control_config.control_iphone_tilt, NULL);
    ardrone_control_config.flight_without_shell = ardrone_application_default_config.flight_without_shell;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(flight_without_shell, &ardrone_control_config.flight_without_shell, NULL);
    ardrone_control_config.altitude_max = ardrone_application_default_config.altitude_max;
    ARDRONE_TOOL_CONFIGURATION_ADDEVENT(altitude_max, &ardrone_control_config.altitude_max, NULL);
    LOGD(TAG, "Reset config to defaults [OK]");
}
/*
 * JNI entry point: requests a fresh configuration fetch.  Stores a global
 * reference to `obj` so checkErrors() can fire onConfigChanged on it once
 * the fetch completes; any previously registered object is released first.
 */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_triggerConfigUpdateNative(JNIEnv *env, jobject obj)
{
LOGI(TAG, "requestConfigNative called");
configurationState = CONFIG_STATE_NEEDED;
if (configObj != NULL) {
(*env)->DeleteGlobalRef(env, configObj);
configObj = NULL;
}
configObj = (*env)->NewGlobalRef(env, obj);
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_switchCamera(JNIEnv *env, jobject obj)
{
parrot_ardrone_ctrl_switch_camera(NULL);
}
/*
 * JNI entry point: requests a flat-trim (gyro/accelerometer zeroing while
 * the drone sits on a level surface).  Thin pass-through.
 */
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_flatTrimNative(JNIEnv *env, jobject obj)
{
parrot_ardrone_ctrl_set_flat_trim();
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_takePhoto(JNIEnv *env, jobject obj)
{
if (ardrone_academy_navdata_get_camera_state() == TRUE) {
if (ardrone_academy_navdata_screenshot()) {
LOGD(TAG, "Screen Shot Request [OK]");
} else {
LOGW(TAG, "Screen Shot Request [FAILED]");
}
} else {
LOGW(TAG, "Camera is not ready!");
}
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_record(JNIEnv *env, jobject obj)
{
bool_t record_state = ardrone_academy_navdata_get_record_ready();
if (TRUE == ardrone_academy_navdata_record())
{
if (record_state)
{
video_stage_encoded_recorder_enable (0, 0);
}
}
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_calibrateMagneto(JNIEnv *env, jobject obj)
{
ardrone_at_set_calibration (ARDRONE_CALIBRATION_DEVICE_MAGNETOMETER);
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_doFlip(JNIEnv *env, jobject obj)
{
string_t anim = "18,15";
ARDRONE_TOOL_CONFIGURATION_ADDEVENT(flight_anim, anim, NULL);
}
JNIEXPORT jobject JNICALL
Java_com_parrot_freeflight_drone_DroneProxy_setLocation(JNIEnv *env, jobject obj, jdouble lat, jdouble lon, jdouble alt)
{
if(gpsState == CONFIG_STATE_IDLE)
{
gpsInfo.latitude = lat;
gpsInfo.longitude = lon;
gpsInfo.altitude = alt;
gpsState = CONFIG_STATE_NEEDED;
LOGD(TAG, "GPS Config Message Received");
}
}
/*
* ftp_client_stub.c
*
* Created on: Jul 26, 2011
* Author: "Dmytro Baryskyy"
*/
#include "common.h"
#include <utils/ardrone_ftp.h>
static char* TAG = "ftp_client_stub";

/*
 * Reads the `ftpStatus` int field of the Java FTPClient object and returns
 * it as an _ftp_status.
 * Fix: the value returned by GetIntField is a jint; it was stored in a
 * jlong for no reason -- use jint for type consistency.
 */
static _ftp_status get_ftp_status(JNIEnv *env, jobject obj)
{
    jclass ftpClientClass = (*env)->GetObjectClass(env, obj);
    jfieldID ftpStatusFid = (*env)->GetFieldID(env, ftpClientClass, "ftpStatus", "I");
    jint ftpStatus = (*env)->GetIntField(env, obj, ftpStatusFid);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, ftpClientClass);
    return (_ftp_status) ftpStatus;
}
/*
 * Recovers the native _ftp_t* stored in the Java object's `connectionHandle`
 * int field, and re-binds the handle's `tag` to a fresh global reference to
 * `obj` so async callbacks can reach the current caller.
 *
 * NOTE(review): a native pointer is round-tripped through a 32-bit Java int
 * ("I") -- this truncates pointers on 64-bit ABIs.  The proper fix needs the
 * Java field changed to long ("J"), which is outside this file.
 */
static _ftp_t* get_ftp_handle(JNIEnv *env, jobject obj)
{
jclass ftpClientClass = (*env)->GetObjectClass(env, obj);
jfieldID connectionHandleFid = (*env)->GetFieldID(env, ftpClientClass, "connectionHandle", "I");
jint connectionHandle = (*env)->GetIntField(env, obj, connectionHandleFid);
// Removing reference to the class instance
(*env)->DeleteLocalRef(env, ftpClientClass);
_ftp_t* ftp_handle = (_ftp_t*) connectionHandle;
if (ftp_handle != NULL)
{
if (ftp_handle->tag != NULL)
{
// Deleting old reference to java ftpclient object
jobject caller = ftp_handle->tag;
(*env)->DeleteGlobalRef(env, caller);
ftp_handle->tag = NULL;
}
// we need to save the object that is calling this method in order to call callback of this object
ftp_handle->tag = (*env)->NewGlobalRef(env, obj);
}
return ftp_handle;
}
/*
 * Writes `status` back into the Java FTPClient object's `ftpStatus` int
 * field so the Java side can inspect the result of the last operation.
 */
static void update_ftp_status_field(JNIEnv *env, jobject obj, _ftp_status status)
{
jclass ftpClientClass = (*env)->GetObjectClass(env, obj);
jfieldID ftpStatusFid = (*env)->GetFieldID(env, ftpClientClass, "ftpStatus", "I");
(*env)->SetIntField(env, obj, ftpStatusFid, (jint)status);
// Removing reference to the class instance
(*env)->DeleteLocalRef(env, ftpClientClass);
}
/*
 * Stores the native _ftp_t* into the Java object's `connectionHandle` int
 * field.  NOTE(review): casting a pointer to jint truncates on 64-bit ABIs
 * (see get_ftp_handle()); fixing this requires a Java-side long field.
 */
static void update_conn_handler_field(JNIEnv *env, jobject obj, _ftp_t* handle)
{
jclass ftpClientClass = (*env)->GetObjectClass(env, obj);
jfieldID connectionHandleFid = (*env)->GetFieldID(env, ftpClientClass, "connectionHandle", "I");
(*env)->SetIntField(env, obj, connectionHandleFid, (jint)handle);
// Removing reference to the class instance
(*env)->DeleteLocalRef(env, ftpClientClass);
}
/*
 * Native ftp_callback that forwards progress/status to the Java object's
 * `void callback(int, float, String)` method.  Runs on the FTP worker
 * thread, so it must attach to (and detach from) the JVM around the call.
 *
 * Fixes vs. original:
 *  - if g_vm is unavailable we now return instead of dereferencing a NULL
 *    JNIEnv;
 *  - the "method not found" path now releases the local class ref and
 *    detaches the thread before returning (it previously left the worker
 *    thread attached forever).
 */
static void wrapperCallback (_ftp_status status, void *arg, _ftp_t *ftp)
{
    JNIEnv* jniEnv = NULL;
    jobject obj = ftp->tag;
    if (obj == NULL) {
        LOGW(TAG, "wrapperCallback(). Can't call callback. Env or object is null");
        return;
    }
    if (!g_vm)
    {
        LOGW(TAG, "g_vm is not available!");
        return;
    }
    (*g_vm)->AttachCurrentThread (g_vm, (JNIEnv **) &jniEnv, NULL);
    jclass cls = (*jniEnv)->GetObjectClass(jniEnv, obj);
    jmethodID mid = (*jniEnv)->GetMethodID(jniEnv, cls, "callback", "(IFLjava/lang/String;)V");
    if (mid == 0) {
        LOGE(TAG, "Can't find method callback");
        (*jniEnv)->DeleteLocalRef(jniEnv, cls);
        (*g_vm)->DetachCurrentThread (g_vm);
        return;
    }
    jint locStatus = (jint)status;
    jfloat progress = 0.0f;
    jstring fileList = NULL;
    if (FTP_PROGRESS == locStatus && NULL != arg)
    {
        /* For progress events, arg points at a float percentage. */
        progress = *(jfloat *)arg;
        fileList = NULL;
    }
    (*jniEnv)->CallVoidMethod(jniEnv, obj, mid, locStatus, progress, fileList);
    // Removing reference to the class instance
    (*jniEnv)->DeleteLocalRef(jniEnv, cls);
    (*g_vm)->DetachCurrentThread (g_vm);
}
/*
 * JNI entry point: opens an FTP connection and stores the native handle and
 * status back into the Java FTPClient object.  Returns TRUE on success.
 *
 * Fixes vs. original:
 *  - on GetStringUTFChars failure, the strings that WERE acquired are now
 *    released (they previously leaked on the early-return path);
 *  - passwordASCII is now NULL-checked too before being passed to ftpConnect.
 */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpConnect(JNIEnv *env, jobject obj, jstring ip, jint port, jstring username, jstring password)
{
    _ftp_status initResult = FTP_FAIL;
    const char *ipAddressASCII = (*env)->GetStringUTFChars(env, ip, NULL);
    const char *usernameASCII = (*env)->GetStringUTFChars(env, username, NULL);
    const char *passwordASCII = (*env)->GetStringUTFChars(env, password, NULL);
    if (ipAddressASCII == NULL || usernameASCII == NULL || passwordASCII == NULL) {
        /* OutOfMemoryError already thrown; release whatever was acquired. */
        if (ipAddressASCII != NULL) {
            (*env)->ReleaseStringUTFChars(env, ip, ipAddressASCII);
        }
        if (usernameASCII != NULL) {
            (*env)->ReleaseStringUTFChars(env, username, usernameASCII);
        }
        if (passwordASCII != NULL) {
            (*env)->ReleaseStringUTFChars(env, password, passwordASCII);
        }
        return FALSE;
    }
    _ftp_t* ftp = ftpConnect (ipAddressASCII,
                              port,
                              usernameASCII,
                              passwordASCII,
                              &initResult);
    (*env)->ReleaseStringUTFChars(env, ip, ipAddressASCII);
    (*env)->ReleaseStringUTFChars(env, username, usernameASCII);
    (*env)->ReleaseStringUTFChars(env, password, passwordASCII);
    update_ftp_status_field(env, obj, initResult);
    update_conn_handler_field(env, obj, ftp);
    return FTP_SUCCEDED(initResult)?TRUE:FALSE;
}
/*
 * JNI entry point: closes the FTP connection held by this FTPClient object.
 * Releases the global ref stored in ftp->tag first so callbacks cannot fire
 * into a stale object, then closes via ftpClose() (which takes &ftp and is
 * presumed to free/NULL the handle -- confirm in ardrone_ftp).  The updated
 * handle and status are written back to the Java object.
 */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpDisconnect(JNIEnv *env, jobject obj)
{
_ftp_t* ftp = get_ftp_handle(env, obj);
if (ftp == NULL) {
LOGW(TAG, "ftpDisconnect: Connection is null");
return FALSE;
}
if (ftp->tag != NULL) {
jobject tag = ftp->tag;
(*env)->DeleteGlobalRef(env, tag);
ftp->tag = NULL;
}
_ftp_status status = ftpClose(&ftp);
update_ftp_status_field(env, obj, status);
update_conn_handler_field(env, obj, ftp);
return FTP_SUCCEDED(status)?TRUE:FALSE;
}
/*
 * Shared implementation for FTPClient.ftpGet / ftpGetSync: downloads
 * `remoteName` into `localName`, optionally resuming.  `callback` may be
 * NULL for a synchronous transfer.  Returns TRUE when ftpGet succeeded.
 *
 * Fixes vs. original (now mirrors ftp_ftpPut):
 *  - the UTF strings are acquired only after the NULL-handle check, so
 *    nothing leaks on the early-return path;
 *  - both strings are released after the call (they previously leaked on
 *    every invocation).  Assumes ftpGet copies the paths before returning,
 *    as ftp_ftpPut already assumes for ftpPut -- confirm for async mode.
 */
static jboolean
ftp_ftpGet(JNIEnv *env, jobject obj, ftp_callback callback, jstring remoteName, jstring localName, jboolean useResume)
{
    if (localName == NULL || remoteName == NULL) {
        LOGW(TAG, "ftpGet() failed. Invalid parameters.");
        return FALSE;
    }
    _ftp_t* ftp = get_ftp_handle(env, obj);
    if (ftp == NULL) {
        LOGW(TAG, "ftpGet: Connection is null");
        return FALSE;
    }
    const char *localNameASCII = (*env)->GetStringUTFChars(env, localName, NULL);
    const char *remoteNameASCII = (*env)->GetStringUTFChars(env, remoteName, NULL);
    _ftp_status status = ftpGet(ftp, remoteNameASCII, localNameASCII, useResume == TRUE ? 1 : 0, callback);
    (*env)->ReleaseStringUTFChars(env, localName, localNameASCII);
    (*env)->ReleaseStringUTFChars(env, remoteName, remoteNameASCII);
    update_ftp_status_field(env, obj, status);
    update_conn_handler_field(env, obj, ftp);
    return (FTP_SUCCEDED(status)?TRUE:FALSE);
}
/*
 * Shared implementation for FTPClient.ftpPut / ftpPutSync: uploads
 * `localName` to `remoteName`, optionally resuming a partial transfer.
 * `callback` may be NULL for a synchronous transfer.  Returns TRUE when
 * the underlying ftpPut reported success.
 */
static jboolean
ftp_ftpPut(JNIEnv *env, jobject obj, ftp_callback callback, jstring localName, jstring remoteName, jboolean useResume)
{
    if (remoteName == NULL || localName == NULL) {
        LOGW(TAG, "ftpPut() failed. Invalid parameters.");
        return FALSE;
    }
    _ftp_t* connection = get_ftp_handle(env, obj);
    if (connection == NULL) {
        LOGW(TAG, "ftpPut: Connection is null");
        return FALSE;
    }
    const char *local = (*env)->GetStringUTFChars(env, localName, NULL);
    const char *remote = (*env)->GetStringUTFChars(env, remoteName, NULL);
    int resume = (useResume == TRUE) ? 1 : 0;
    _ftp_status result = ftpPut(connection, local, remote, resume, callback);
    (*env)->ReleaseStringUTFChars(env, localName, local);
    (*env)->ReleaseStringUTFChars(env, remoteName, remote);
    update_ftp_status_field(env, obj, result);
    update_conn_handler_field(env, obj, connection);
    if (FTP_SUCCEDED(result)) {
        return TRUE;
    }
    return FALSE;
}
/*
 * JNI entry point: reports whether the native FTP handle held by this
 * FTPClient object currently considers itself connected.
 */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpIsConnected(JNIEnv *env, jobject obj)
{
    _ftp_t* connection = get_ftp_handle(env, obj);
    if (connection == NULL) {
        return FALSE;
    }
    return (connection->connected > 0) ? TRUE : FALSE;
}
/*
 * JNI entry point: aborts the in-flight FTP transfer on this connection.
 * The callback reference (ftp->tag) is released first so no completion
 * callback reaches the Java object after the abort.  Status and handle are
 * written back to the Java object; returns TRUE when ftpAbort succeeded.
 */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpAbort(JNIEnv *env, jobject obj)
{
_ftp_t* ftp = get_ftp_handle(env, obj);
if (ftp == NULL) {
LOGW(TAG, "ftpAbort: Connection is null");
return FALSE;
}
if (ftp->tag != NULL) {
jobject tag = ftp->tag;
(*env)->DeleteGlobalRef(env, tag);
ftp->tag = NULL;
}
_ftp_status status = ftpAbort(ftp);
update_ftp_status_field(env, obj, status);
update_conn_handler_field(env, obj, ftp);
return (FTP_SUCCEDED(status)?TRUE:FALSE);
}
/* Asynchronous upload: progress/result delivered via wrapperCallback. */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpPut(JNIEnv *env, jobject obj, jstring localName, jstring remoteName, jboolean useResume)
{
return ftp_ftpPut(env, obj, wrapperCallback, localName, remoteName, useResume);
}
/* Synchronous upload: NULL callback makes ftp_ftpPut block until done. */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpPutSync(JNIEnv *env, jobject obj, jstring localName, jstring remoteName, jboolean useResume)
{
return ftp_ftpPut(env, obj, NULL, localName, remoteName, useResume);
}
/* Asynchronous download: progress/result delivered via wrapperCallback. */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpGet(JNIEnv *env, jobject obj, jstring remoteName, jstring localName, jboolean useResume)
{
return ftp_ftpGet(env, obj, wrapperCallback, remoteName, localName, useResume);
}
/* Synchronous download: NULL callback makes ftp_ftpGet block until done. */
JNIEXPORT jboolean JNICALL
Java_com_parrot_ftp_FTPClient_ftpGetSync(JNIEnv *env, jobject obj, jstring remoteName, jstring localName, jboolean useResume)
{
return ftp_ftpGet(env, obj, NULL, remoteName, localName, useResume);
}
/*
* gl_bg_video_sprite_stub.c
*
* Created on: Feb 1, 2012
* Author: "Dmytro Baryskyy"
*/
#include "common.h"
#include <android/bitmap.h>
#include "gl_bg_video_sprite_stub.h"
#include "../Callbacks/java_callbacks.h"
static char* TAG = "gl_bg_video_sprite";
// Last known GL surface size, updated from onSurfaceChangedNative().
static opengl_size_t screen_size;
// When TRUE, the texture/image sizes are pushed back to the Java sprite
// object on the next update (see recalculate_video_texture_bounds()).
static bool_t recalculate_video_texture = FALSE;
// Frame counters of the last decoded picture consumed from the video stage.
static int32_t current_num_picture_decoded = 0;
static int32_t current_num_frames = 0;
// NOTE(review): non-static and apparently unused in this file -- possibly
// referenced elsewhere; confirm before removing.
opengl_scaling scaling;
// The single texture this sprite renders; lazily zeroed via init_texture().
static opengl_texture_t texture;
static bool_t texture_initialized = FALSE;
/* One-shot lazy initializer: zero-fills the file-scope texture struct. */
static void init_texture()
{
vp_os_memset(&texture, 0, sizeof(opengl_texture_t));
}
/*
 * Publishes the current image and texture dimensions to the Java sprite
 * object (fields imageWidth/imageHeight/textureWidth/textureHeight).
 * Despite the name, no recalculation happens here -- it only mirrors the
 * native texture state into Java.
 */
static void recalculate_video_texture_bounds(JNIEnv *env, jobject obj, opengl_texture_t* texture)
{
java_set_field_int(env, obj, "imageWidth", texture->image_size.width);
java_set_field_int(env, obj, "imageHeight", texture->image_size.height);
java_set_field_int(env, obj, "textureWidth", texture->texture_size.width);
java_set_field_int(env, obj, "textureHeight", texture->texture_size.height);
}
/*
 * Computes the model-space scale (scaleModelX/Y) needed to map the video
 * image onto the screen under the requested scaling policy, plus the
 * texture-coordinate scale (scaleTextureX/Y = used image area / allocated
 * power-of-two texture area).
 *
 * NOTE(review): the NO_SCALING case crosses axes (height/screen width,
 * width/screen height) -- possibly intentional for a rotated surface, but
 * it looks suspicious next to FIT_X/FIT_Y; confirm against the GL renderer.
 */
void opengl_texture_scale_compute(opengl_texture_t *texture, opengl_size_t screen_size, opengl_scaling scaling)
{
LOGD(TAG, "%s sizes %f, %f, %f, %f\n", __FUNCTION__, texture->image_size.width, texture->image_size.height, texture->texture_size.width, texture->texture_size.height);
switch(scaling)
{
case NO_SCALING:
texture->scaleModelX = texture->image_size.height / screen_size.width;
texture->scaleModelY = texture->image_size.width / screen_size.height;
break;
case FIT_X:
// Fill the screen horizontally; Y keeps the image's aspect ratio.
texture->scaleModelX = (screen_size.height * texture->image_size.height) / (screen_size.width * texture->image_size.width);
texture->scaleModelY = 1.0f;
break;
case FIT_Y:
// Fill the screen vertically; X keeps the image's aspect ratio.
texture->scaleModelX = 1.0f;
texture->scaleModelY = (screen_size.width * texture->image_size.width) / (screen_size.height * texture->image_size.height);
break;
default:
texture->scaleModelX = 1.0f;
texture->scaleModelY = 1.0f;
break;
}
// Fraction of the (typically power-of-two) texture actually covered by image.
texture->scaleTextureX = texture->image_size.width / (float)texture->texture_size.width;
texture->scaleTextureY = texture->image_size.height / (float)texture->texture_size.height;
}
/*
 * JNI entry point, called from the GL render thread: pulls the newest
 * decoded video frame from the opengl video stage and uploads it to the
 * currently bound GL_TEXTURE_2D.  Returns TRUE only when a new frame was
 * actually sent to the GPU.
 *
 * NOTE(review): the frame-capture section is duplicated in
 * getVideoFrameNative(); only image WIDTH changes trigger a bounds
 * republish (a height-only change would be missed).  `program` and
 * `textureId` parameters are unused here -- binding is presumed done by
 * the Java caller.
 */
JNIEXPORT jboolean JNICALL
Java_com_parrot_freeflight_ui_gl_GLBGVideoSprite_onUpdateVideoTextureNative(JNIEnv *env, jobject obj, jint program, jint textureId)
{
if (texture_initialized == FALSE) {
init_texture();
texture_initialized = TRUE;
}
opengl_video_stage_config_t *config = opengl_video_stage_get();
// Copy the frame descriptor only when the decoder produced a NEWER picture.
if ((config != NULL) && (config->data != NULL) && (config->num_picture_decoded > current_num_picture_decoded))
{
if (texture.image_size.width != config->widthImage) {
recalculate_video_texture = TRUE;
}
texture.bytesPerPixel = config->bytesPerPixel;
texture.image_size.width = config->widthImage;
texture.image_size.height = config->heightImage;
texture.texture_size.width = config->widthTexture;
texture.texture_size.height = config->heightTexture;
texture.format = config->format;
texture.type = config->type;
texture.data = config->data;
texture.state = OPENGL_STATE_GENERATED;
current_num_picture_decoded = config->num_picture_decoded;
current_num_frames = config->num_frames;
}
// Push (possibly changed) dimensions back to the Java sprite object.
if (recalculate_video_texture) {
recalculate_video_texture_bounds(env, obj, &texture);
recalculate_video_texture = FALSE;
}
if(texture.state == OPENGL_STATE_GENERATED)
{
// Load the texture in the GPU
if (texture.data != NULL) {
// LOGD("GL_BG_VIDEO_SPRITE", "fmt: %d, w: %f, h: %f, type: %d, data: %p", texture.format, texture.texture_size.width, texture.texture_size.height, texture.type, texture.data);
glTexImage2D(GL_TEXTURE_2D, 0, texture.format, texture.texture_size.width, texture.texture_size.height, 0, texture.format, texture.type, texture.data);
texture.state = OPENGL_STATE_SEND_TO_GPU;
return TRUE;
}
}
return FALSE;
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_ui_gl_GLBGVideoSprite_onSurfaceChangedNative(JNIEnv *env, jobject obj, jint width, jint height)
{
    /* Records the new surface dimensions and schedules a recomputation of the
     * texture scale factors on the next frame update. */
    screen_size.width = width;
    screen_size.height = height;
    recalculate_video_texture = TRUE;
}
JNIEXPORT jboolean JNICALL
Java_com_parrot_freeflight_ui_gl_GLBGVideoSprite_getVideoFrameNative(JNIEnv *env, jobject obj, jobject bitmap, jfloatArray videoSize)
{
    /* Copies the latest decoded video frame into an RGB_565 Android bitmap and
     * reports image size / model scale through 'videoSize' (4 floats).
     * Returns TRUE when a new frame was delivered, FALSE otherwise. */
    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    jboolean result = FALSE;
    if (screen_size.width == 0 || screen_size.height == 0)
        return FALSE;
    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        return FALSE;
    }
    /* The memcpy below assumes the decoder output layout matches the bitmap;
     * only RGB_565 bitmaps are accepted. */
    if (info.format != ANDROID_BITMAP_FORMAT_RGB_565) {
        return FALSE;
    }
    opengl_video_stage_config_t *config = opengl_video_stage_get();
    /* Pull the frame descriptor only when a newer picture is available. */
    if ((config != NULL) && (config->data != NULL) && (config->num_picture_decoded > current_num_picture_decoded))
    {
        if (texture.image_size.width != config->widthImage) {
            recalculate_video_texture = TRUE;
        }
        texture.bytesPerPixel = config->bytesPerPixel;
        texture.image_size.width = config->widthImage;
        texture.image_size.height = config->heightImage;
        texture.texture_size.width = config->widthTexture;
        texture.texture_size.height = config->heightTexture;
        texture.format = config->format;
        texture.type = config->type;
        texture.data = config->data;
        texture.state = OPENGL_STATE_GENERATED;
        current_num_picture_decoded = config->num_picture_decoded;
        current_num_frames = config->num_frames;
    }
    if (recalculate_video_texture && screen_size.width != 0 && screen_size.height != 0) {
        opengl_texture_scale_compute(&texture, screen_size, FIT_X);
        LOGD("VIDEO", "Screen Widht: %f", screen_size.width);
        recalculate_video_texture = FALSE;
    }
    if (texture.state == OPENGL_STATE_GENERATED)
    {
        /* BUGFIX: a failed lock used to be silently ignored, after which
         * 'pixels' was passed uninitialized to memcpy. Bail out instead. */
        if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
            return FALSE;
        }
        result = TRUE;
        memcpy(pixels, texture.data, texture.image_size.width * texture.image_size.height * texture.bytesPerPixel);
        texture.state = OPENGL_STATE_SEND_TO_GPU;
        /* Report frame geometry back to Java: {width, height, scaleX, scaleY}. */
        jfloat *body = (*env)->GetFloatArrayElements(env, videoSize, 0);
        body[0] = (float)texture.image_size.width;
        body[1] = (float)texture.image_size.height;
        body[2] = (float)texture.scaleModelX;
        body[3] = (float)texture.scaleModelY;
        (*env)->ReleaseFloatArrayElements(env, videoSize, body, 0);
        AndroidBitmap_unlockPixels(env, bitmap);
    }
    return result;
}
/*
* gl_bg_video_sprite_stub.h
*
* Created on: Jul 27, 2012
* Author: "Dmytro Baryskyy"
*/
#ifndef GL_BG_VIDEO_SPRITE_STUB_H_
#define GL_BG_VIDEO_SPRITE_STUB_H_

/* Scaling mode used when fitting the video image onto the screen. */
typedef enum
{
    NO_SCALING,
    FIT_X,      /* fill the screen horizontally, preserve aspect */
    FIT_Y,      /* fill the screen vertically, preserve aspect */
    FIT_XY
} opengl_scaling;

/* Frame lifecycle: initialized -> new frame available -> uploaded to GPU. */
typedef enum
{
    OPENGL_STATE_INITIALIZED = 0,
    OPENGL_STATE_GENERATED,    /* a fresh frame is pending in 'data' */
    OPENGL_STATE_SEND_TO_GPU   /* current frame already consumed */
} opengl_state;

typedef struct
{
    GLfloat width;
    GLfloat height;
} opengl_size_t;

/* A video texture plus the scale factors used to draw it. */
typedef struct
{
    opengl_size_t image_size;    /* decoded image size, pixels */
    opengl_size_t texture_size;  /* backing GL texture size */
    GLfloat scaleModelX;         /* model-space scale (see opengl_texture_scale_compute) */
    GLfloat scaleModelY;
    GLfloat scaleTextureX;       /* image/texture ratio: image_size / texture_size */
    GLfloat scaleTextureY;
    GLuint bytesPerPixel;
    GLenum format;               /* as passed to glTexImage2D */
    GLenum type;
    void* data;                  /* raw frame bytes — provided by the video stage, not owned here (confirm) */
    GLuint textureId[2];
    GLuint vertexBufferId;
    GLuint indexBufferId;
    opengl_state state;
} opengl_texture_t;

#endif /* GL_BG_VIDEO_SPRITE_STUB_H_ */
/*
* plf_file_stub.c
*
* Created on: Aug 30, 2011
* Author: Dmytro Baryskyy
*/
#include "common.h"
#include "../Plf/plf.h"
static char* TAG = {"PLF_FILE_STUB\0"};
/* Invokes the Java-side callback getHeader(int) on 'obj' and returns the byte
 * array it produced (the first 'size' bytes of the PLF file), or NULL when the
 * method cannot be resolved. Caller owns the returned local reference. */
static jbyteArray getHeader(JNIEnv* env, jobject obj, jint size)
{
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, "getHeader", "(I)[B");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        return NULL;
    }
    // Getting the file header from java code
    jbyteArray result = (*env)->CallObjectMethod(env, obj, mid, size);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, cls);
    return result;
}
JNIEXPORT jstring JNICALL
Java_com_parrot_plf_PlfFile_getVersionNative(JNIEnv *env, jobject obj)
{
    /* Reads the PLF header via the Java getHeader() callback and returns the
     * firmware version as a "ver.edit.ext" string, or NULL on failure. */
    jbyteArray headerDataArray = getHeader(env, obj, sizeof(plf_phdr));
    if (headerDataArray == NULL) {
        LOGE(TAG, "Can't get plf header");
        return NULL;
    }
    jbyte* const rawHeader = (*env)->GetByteArrayElements( env, headerDataArray , 0);
    /* BUGFIX: GetByteArrayElements may return NULL on OOM; the old code would
     * then memcpy from a NULL pointer. */
    if (rawHeader == NULL) {
        LOGE(TAG, "Can't pin plf header bytes");
        return NULL;
    }
    plf_phdr plf_header;
    memcpy(&plf_header, rawHeader, sizeof(plf_phdr));
    /* JNI_ABORT: the buffer was only read, nothing to copy back. */
    (*env)->ReleaseByteArrayElements(env, headerDataArray, rawHeader, JNI_ABORT);
    char version[256] = {0};
    /* BUGFIX: bounded snprintf instead of sprintf (defensive hardening). */
    snprintf(version, sizeof(version), "%d.%d.%d", plf_header.p_ver, plf_header.p_edit, plf_header.p_ext);
    LOGI(TAG, "Version of plf file: %s", version);
    return (*env)->NewStringUTF(env, version);
}
/*
* transcoding_service_stub.c
*
* Created on: Apr 6, 2012
* Author: "Dmytro Baryskyy"
*/
#include <common.h>
#include "transcoding_service_stub.h"
#include "video_stage_io_file.h"
#include <libswscale/swscale.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <utils/ardrone_video_atoms.h>
#define NB_STAGES 5
#define ENCODER_PRIORITY (15)
#define STREAM_DURATION 5.0
#define STREAM_FRAME_RATE 20 /* 20 images/s */
#define STREAM_NB_FRAMES ((int)(STREAM_DURATION * STREAM_FRAME_RATE))

static const char* TAG = "TranscodingServiceNative";

/* Pause/stop handshake between the service and the encoder thread; all three
 * are meant to be accessed under encoder_stage_mutex. */
static bool_t encoder_stage_in_pause = FALSE;
static bool_t encoder_sgate_stop_requested = FALSE;  /* NOTE(review): 'sgate' looks like a typo for 'stage' */
static vp_os_cond_t encoder_stage_condition;
static vp_os_mutex_t encoder_stage_mutex;
static THREAD_HANDLE encoder_thread;

/*- LibAVFormat variables */
const char *filename;
static AVFrame *picture = NULL, *tmp_picture = NULL;  /* tmp_picture only used when pix_fmt != YUV420P */
static AVOutputFormat *fmt = NULL;
static AVFormatContext *oc = NULL;                    /* muxer context of the file being written */
static AVStream *video_st = NULL;
double video_pts;
#define STREAM_BIT_RATE_KBITS 1600
static const int sws_flags = SWS_BICUBIC;
uint8_t *video_outbuf=NULL;                           /* encoder output buffer, allocated in open_video() */
int frame_count=0, video_outbuf_size=0;
/* VP API callbacks for the encoder stage of the transcoding pipeline. */
const vp_api_stage_funcs_t encoder_stage_funcs =
{
    (vp_api_stage_handle_msg_t) encoder_handle,
    (vp_api_stage_open_t) encoder_stage_open,
    (vp_api_stage_transform_t) encoder_stage_transform,
    (vp_api_stage_close_t) encoder_stage_close
};

/* NOTE(review): never referenced anywhere in this file — apparently dead. */
struct
{
    int width,height;
    char* buffer;
    int frame_number;
} previous_frame;
/* Wakes the encoder thread when it is parked on encoder_stage_condition.
 * The pause flag is cleared and the condition signalled under the stage
 * mutex, so the waiter observes both atomically. */
void encoderThreadResume()
{
    vp_os_mutex_lock(&encoder_stage_mutex);
    encoder_stage_in_pause = FALSE;
    vp_os_cond_signal(&encoder_stage_condition);
    LOGV(TAG, "Encoder stage resumed");
    vp_os_mutex_unlock(&encoder_stage_mutex);
}
void encoderThreadStop()
{
vp_os_mutex_lock(&encoder_stage_mutex);
encoder_sgate_stop_requested = TRUE;
LOGV(TAG, "Encoder stage stop requested");
vp_os_mutex_unlock(&encoder_stage_mutex);
if (encoder_stage_in_pause) {
encoderThreadResume();
}
}
JNIEXPORT void JNICALL
Java_com_parrot_freeflight_transcodeservice_TranscodingService_encoderThreadStart(JNIEnv *env, jobject obj)
{
    /* Resets the pause/stop flags, initializes the synchronization primitives
     * and spawns the encoder thread. A global reference keeps the Java service
     * alive; the encoder thread deletes it when it exits. */
    LOGD(TAG, "Initializing encoder");
    encoder_stage_in_pause = FALSE;
    encoder_sgate_stop_requested = FALSE;
    vp_os_mutex_init(&encoder_stage_mutex);
    vp_os_cond_init(&encoder_stage_condition, &encoder_stage_mutex);
    jobject serviceHandle = (*env)->NewGlobalRef(env, obj);
    LOGD(TAG, "Starting encoder thread...");
    vp_os_thread_create (thread_encoder, serviceHandle, &encoder_thread);
}
/*
 * Upscales one plane by 2x in both dimensions using pixel replication:
 * each input byte becomes a 2x2 block in the output.
 *
 * in_buf    : source plane, 'height' rows of 'width' bytes, rows spaced by
 *             'rowstride' bytes.
 * out_buf   : destination plane, must hold (2*width) * (2*height) bytes;
 *             written densely (output rowstride == 2*width).
 * Returns 0 (kept for interface compatibility; no failure path exists).
 *
 * BUGFIX: the old implementation cast byte pointers at arbitrary offsets to
 * uint32_t* (undefined behavior on unaligned rowstride) and hard-coded a
 * little-endian word layout. This byte-wise version is portable, produces the
 * same output as the old code did on little-endian targets, and additionally
 * handles widths that are not a multiple of 4 (previously trailing pixels
 * were silently dropped).
 */
static int encoder_expand_buffer_x2_yuv420p(/*Input*/uint8_t*in_buf,
                                            /*output*/uint8_t*out_buf,
                                            int width,
                                            int height,
                                            int rowstride)
{
    int row, col;
    for (row = 0; row < height; row++)
    {
        const uint8_t *src = in_buf + (rowstride * row);
        /* Each source row yields two identical destination rows. */
        uint8_t *dst1 = out_buf + (2 * row + 0) * 2 * width;
        uint8_t *dst2 = out_buf + (2 * row + 1) * 2 * width;
        for (col = 0; col < width; col++)
        {
            uint8_t s = src[col];
            dst1[2 * col]     = s;
            dst1[2 * col + 1] = s;
            dst2[2 * col]     = s;
            dst2[2 * col + 1] = s;
        }
    }
    return 0;
}
/* Creates and opens the output container 'filename' (mp4, falling back to
 * mpeg), adds a video stream and opens its codec. On success the module-level
 * globals oc / fmt / video_st describe the open file. Returns C_OK / C_FAIL. */
static C_RESULT create_video_file(const char*filename, int width, int height, int frame_rate, enum PixelFormat pix_fmt)
{
    /* auto detect the output format from the name. default is mpeg. */
    avformat_alloc_output_context2(&oc, NULL, "mp4", filename);
    if (!oc) {
        LOGW(TAG, "Could not deduce output format from file extension: using MPEG.");
        avformat_alloc_output_context2(&oc, NULL, "mpeg", filename);
    }
    if (!oc) {
        LOGW(TAG, "Memory error");
        return C_FAIL;
    }
    LOGD(TAG, "Output Context Created [OK]");
    /* add the audio and video streams using the default format codecs
    and initialize the codecs */
    video_st = NULL;
    fmt = oc->oformat;
    /* Purely diagnostic: report when the guessed codec is not MPEG4. */
    if (fmt->video_codec != CODEC_ID_MPEG4) {
        if (fmt->video_codec == CODEC_ID_H263) {
            LOGD(TAG, "Guessed codec is CODEC_ID_H263");
        } else {
            LOGW(TAG, "Guessed codec is not MPEG4. It is %d", fmt->video_codec);
        }
    }
    LOGD(TAG, "Using codec: %s ", fmt->long_name);
    LOGD(TAG, "Codec ID: %s", fmt->video_codec == CODEC_ID_MPEG4?"CODEC_ID_MPEG4":"UNKNOWN");
    if (fmt->video_codec != CODEC_ID_NONE) {
        LOGD(TAG, "Adding video stream. Width: %d Height: %d Frame rate: %d", width, height, frame_rate);
        video_st = add_video_stream(oc, fmt->video_codec, width, height, frame_rate, pix_fmt);
        if (video_st == NULL) {
            LOGW(TAG, "Could not add video stream.");
            return C_FAIL;
        }
    }
    // av_dump_format(oc, 0, filename, 1);
    // LOGV(TAG, "av_dump_format [OK]");
    /* now that all the parameters are set, we can open the audio and
    video codecs and allocate the necessary encode buffers */
    if (video_st) {
        if (VP_FAILED(open_video(oc, video_st))) {
            LOGW(TAG, "Can't open video");
            return C_FAIL;
        }
    } else {
        LOGW(TAG, "Video_st is null");
    }
    int res = avio_check(filename, AVIO_FLAG_WRITE);
    LOGW(TAG, "avio_check == %d", res);
    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        int result = avio_open(&oc->pb, filename, AVIO_FLAG_WRITE);
        if (result < 0) {
            char error[256] = {0};
            av_strerror(result, error, 256);
            LOGW(TAG, "Could not open '%s', error: %d %s", filename, result, error);
            return C_FAIL;
        } else {
            LOGV(TAG, "avio_open [OK]");
        }
    }
    /* write the stream header, if any */
    av_write_header(oc);
    LOGD(TAG, "Create video file [OK]");
    return C_OK;
}
/* Finalizes and closes the output container opened by create_video_file():
 * writes the trailer, closes the codec, frees streams and the context. */
static void close_video_file()
{
    /* write the trailer, if any. the trailer must be written
     * before you close the CodecContexts open when you wrote the
     * header; otherwise write_trailer may try to use memory that
     * was freed on av_codec_close() */
    av_write_trailer(oc);
    /* close each codec */
    if (video_st)
        close_video(oc, video_st);
    int i=0;
    /* free the streams */
    for(i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }
    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
        avio_close(oc->pb);
    }
    /* free the stream */
    av_free(oc);
}
/* Opens the encoder for stream 'st' and allocates the frame buffers used by
 * write_video_frame(): the module-level 'picture' (encoder input) and, when
 * the codec format is not YUV420P, 'tmp_picture' as a conversion source.
 * Returns C_OK / C_FAIL. */
static C_RESULT open_video(AVFormatContext *oc, AVStream *st)
{
    if (oc == NULL || st == NULL) {
        LOGW(TAG, "Wrong parameters");
        return C_FAIL;
    }
    AVCodec *codec;
    AVCodecContext *c;
    c = st->codec;
    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        LOGW(TAG, "Codec not found");
        return C_FAIL;
    }
    LOGD(TAG, "Find encoder [OK]");
    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        LOGW(TAG, "Could not open codec");
        return C_FAIL;
    }
    LOGD(TAG, "Open codec [OK]");
    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        /* buffers passed into lav* can be allocated any way you prefer,
        as long as they're aligned enough for the architecture, and
        they're freed appropriately (such as using av_free for buffers
        allocated with av_malloc) */
        video_outbuf_size = 200000;
        video_outbuf = av_malloc(video_outbuf_size);
    }
    LOGD(TAG, "Allocate video outbuff [OK]");
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    LOGD(TAG, "Allocate picture [OK]");
    if (!picture) {
        LOGW(TAG, "Could not allocate picture");
        return C_FAIL;
    }
    /* if the output format is not YUV420P, then a temporary YUV420P
    picture is needed too. It is then converted to the required
    output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            LOGW(TAG, "Could not allocate temporary picture");
            return C_FAIL;
        }
    }
    return C_OK;
}
static AVFrame *alloc_picture(enum PixelFormat pix_fmt, int width, int height)
{
AVFrame *picture;
uint8_t *picture_buf;
int size;
picture = avcodec_alloc_frame();
if (!picture)
return NULL;
size = avpicture_get_size(pix_fmt, width, height);
picture_buf = av_malloc(size);
if (!picture_buf) {
av_free(picture);
return NULL;
}
avpicture_fill((AVPicture *)picture, picture_buf,
pix_fmt, width, height);
return picture;
}
/**************************************************************/
/* video output */
/* add a video output stream */
/* Creates a new video stream on 'oc' and fills its codec context with the
 * encoding parameters (bit rate, size, time base, GOP, pixel format).
 * Returns the stream, or NULL when allocation fails. */
static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id, int width, int height,int frame_rate, enum PixelFormat pix_fmt)
{
    AVCodecContext *c;
    AVStream *st;
    st = av_new_stream(oc, 0);
    if (!st) {
        LOGW(TAG, "Could not alloc stream");
        return NULL;
    }
    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = AVMEDIA_TYPE_VIDEO;
    /* put sample parameters */
    c->bit_rate = (STREAM_BIT_RATE_KBITS)*1000;
    /* resolution must be a multiple of two */
    c->width = width;
    c->height = height;
    /* time base: this is the fundamental unit of time (in seconds) in terms
    of which frame timestamps are represented. for fixed-fps content,
    timebase should be 1/framerate and timestamp increments should be
    identically 1. */
    c->time_base.den = frame_rate;
    c->time_base.num = 1;
    c->gop_size = 12; /* emit one intra frame every twelve frames at most */
    c->pix_fmt = pix_fmt;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO){
        /* Needed to avoid using macroblocks in which some coeffs overflow.
        This does not happen with normal video, it just happens here as
        the motion of the chroma plane does not match the luma plane. */
        c->mb_decision=2;
    }
    // some formats want stream headers to be separate
    if(oc->oformat->flags & AVFMT_GLOBALHEADER)
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;
    return st;
}
/* Encodes the current module-level 'picture' and muxes it into 'oc'. When the
 * codec pixel format is not YUV420P, 'tmp_picture' is first converted into
 * 'picture' through a lazily-created (and never freed) SwsContext.
 * Increments frame_count on success. Returns C_OK / C_FAIL. */
static C_RESULT write_video_frame(AVFormatContext *oc, AVStream *st)
{
    int out_size, ret;
    AVCodecContext *c;
    /* static: the conversion context survives across calls. */
    static struct SwsContext *img_convert_ctx;
    //printf("Here0 \n");
    c = st->codec;
    if (frame_count >= STREAM_NB_FRAMES) {
        /* no more frame to compress. The codec has a latency of a few
        frames if using B frames, so we get the last frames by
        passing the same picture again */
    } else {
        if (c->pix_fmt != PIX_FMT_YUV420P) {
            /* as we only generate a YUV420P picture, we must convert it
            to the codec pixel format if needed */
            if (img_convert_ctx == NULL) {
#if (LIBSWSCALE_VERSION_INT<AV_VERSION_INT(0,12,0))
                img_convert_ctx = sws_getContext(c->width, c->height,
                                                 PIX_FMT_YUV420P,
                                                 c->width, c->height,
                                                 c->pix_fmt,
                                                 sws_flags, NULL, NULL, NULL);
#else
                img_convert_ctx = sws_alloc_context();
                if (img_convert_ctx == NULL) {
                    LOGW(TAG, "Cannot initialize the conversion context");
                    return C_FAIL;
                }
                /* see http://permalink.gmane.org/gmane.comp.video.ffmpeg.devel/118362 */
                /* see http://ffmpeg-users.933282.n4.nabble.com/Documentation-for-sws-init-context-td2956723.html */
                av_set_int(img_convert_ctx, "srcw", c->width);
                av_set_int(img_convert_ctx, "srch", c->height);
                av_set_int(img_convert_ctx, "dstw", c->width);
                av_set_int(img_convert_ctx, "dsth", c->height);
                av_set_int(img_convert_ctx, "src_format", PIX_FMT_YUV420P);
                av_set_int(img_convert_ctx, "dst_format", c->pix_fmt);
                av_set_int(img_convert_ctx, "param0", 0);
                av_set_int(img_convert_ctx, "param1", 0);
                av_set_int(img_convert_ctx, "flags", sws_flags);
                sws_init_context(img_convert_ctx,NULL,NULL);
#endif
            }
            sws_scale(img_convert_ctx, (const uint8_t* const *)tmp_picture->data,
                      tmp_picture->linesize,
                      0, c->height, picture->data, picture->linesize);
        } else {
        }
    }
    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case. The API will change slightly in the near
        futur for that */
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index= st->index;
        pkt.data= (uint8_t *)picture;
        pkt.size= sizeof(AVPicture);
        ret = av_interleaved_write_frame(oc, &pkt);
    } else {
        /* encode the image */
        out_size = avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;
            av_init_packet(&pkt);
            /* Rescale the codec timestamp into the stream time base. */
            if (c->coded_frame->pts != AV_NOPTS_VALUE)
                pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
            if(c->coded_frame->key_frame)
                pkt.flags |= AV_PKT_FLAG_KEY;
            pkt.stream_index= st->index;
            pkt.data= video_outbuf;
            pkt.size= out_size;
            /* write the compressed frame in the media file */
            ret = av_interleaved_write_frame(oc, &pkt);
        } else {
            ret = 0;
        }
    }
    if (ret != 0) {
        LOGW(TAG, "Error while writing video frame");
        return C_FAIL;
    }
    frame_count++;
    return C_OK;
}
/* Closes the codec and releases the frames allocated in open_video().
 * NOTE(review): picture->data[] may have been repointed at external buffers by
 * encoder_stage_transform, so the buffer originally attached by alloc_picture
 * appears to leak here — confirm. */
static void close_video(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);
    av_free(picture);
    picture = NULL;
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
        tmp_picture=NULL;
    }
    av_free(video_outbuf);
}
/* Calls the Java service method getNextFile() and returns its jstring result,
 * or NULL when the method cannot be resolved.
 * NOTE(review): the 'extention' parameter (sic) is unused. */
static jstring encoder_stage_get_next_file(JNIEnv* env, jobject obj, const char* extention)
{
    jclass cls = (*env)->GetObjectClass(env, obj);
    jmethodID mid = (*env)->GetMethodID(env, cls, "getNextFile", "()Ljava/lang/String;");
    if (mid == 0) {
        LOGW(TAG, "Method not found");
        return NULL;
    }
    jstring result = (*env)->CallObjectMethod(env, obj, mid);
    // Removing reference to the class instance
    (*env)->DeleteLocalRef(env, cls);
    return result;
}
/* Notifies the Java service (onMediaReady) that the transcoded file
 * cfg->file_dest is available. */
static void notify_media_ready(JNIEnv* env, jobject obj, encoder_stage_config_t* cfg)
{
    LOGD(TAG, "Notifying about new media available");
    parrot_java_callbacks_call_void_method_string(env, obj, "onMediaReady", cfg->file_dest);
}
/* Pipeline message handler for the encoder stage.
 * PIPELINE_MSG_START toggles the recording state between STOP and HOLD;
 * every other message is ignored. Always returns VP_SUCCESS. */
C_RESULT
encoder_handle (encoder_stage_config_t * cfg, PIPELINE_MSG msg_id, void *callback, void *param)
{
    LOGV(TAG, "FFMPEG recorder message handler.");
    if (msg_id == PIPELINE_MSG_START)
    {
        cfg->startRec = (cfg->startRec == VIDEO_RECORD_STOP)
                            ? VIDEO_RECORD_HOLD
                            : VIDEO_RECORD_STOP;
    }
    return (VP_SUCCESS);
}
/* Opens the transcoding output file. The destination name is the source name
 * with its last 3 characters replaced by "bak" (e.g. "video.enc" ->
 * "video.bak"). cfg->file_dest must already point to a buffer at least as
 * large as cfg->file_src (allocated by the encoder thread).
 * Returns C_OK on success, C_FAIL otherwise. */
C_RESULT encoder_stage_open(encoder_stage_config_t *cfg)
{
    LOGV(TAG, "Encoder Stage Open called");
    avcodec_init();
    av_register_all();
    size_t src_len = strlen(cfg->file_src);
    /* BUGFIX: guard against names too short to carry a 3-char extension; the
     * old code indexed the destination buffer at a negative offset. */
    if (src_len < 4) {
        LOGE(TAG, "Source file name too short: %s", cfg->file_src);
        return C_FAIL;
    }
    /* Build "<prefix>bak" in bounded steps (the old strncpy left the copy
     * unterminated until patched by hand afterwards). */
    memcpy(cfg->file_dest, cfg->file_src, src_len - 3);
    cfg->file_dest[src_len - 3] = '\0';
    strcat(cfg->file_dest, "bak");
    cfg->video_file_open = 0;
    LOGD(TAG, "Destination file name: %s", cfg->file_dest);
    if (VP_FAILED(create_video_file(cfg->file_dest, 320, 240, STREAM_FRAME_RATE, PIX_FMT_YUV420P))) {
        return C_FAIL;
    }
    cfg->video_file_open = 1;
    LOGD(TAG, "File opened [OK]");
    return C_OK;
}
/* Pipeline transform: feeds the frame decoded by the vlib stage (reachable via
 * cfg->vlib_stage_decoding_config) to the ffmpeg encoder. Encoding starts only
 * after the first INTRA frame of the session; frames smaller than the output
 * size are upscaled 2x by pixel replication first. */
C_RESULT encoder_stage_transform(encoder_stage_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
    C_RESULT result = C_FAIL;
    if (in == NULL || out == NULL || cfg == NULL) {
        LOGE(TAG, "One of input parameters to encoder_sgate_transform is NULL");
        return result;
    }
    vp_os_mutex_lock(&out->lock);
    if( out->status == VP_API_STATUS_INIT )
    {
        cfg->previous_num_picture_decoded = 0;
        out->status = VP_API_STATUS_PROCESSING;
    }
    if( in->status == VP_API_STATUS_ENDED )
    {
        out->status = in->status;
        result = C_OK;
    }
    if(out->status == VP_API_STATUS_PROCESSING)
    {
        /* Only act when the decoder produced a frame we have not seen yet. */
        if(cfg->vlib_stage_decoding_config->num_picture_decoded > cfg->previous_num_picture_decoded)
        {
            if(!cfg->first_frame_ok)
            {
                /* Wait for a keyframe before starting the encode session. */
                if(cfg->vlib_stage_decoding_config->controller.picture_type == VIDEO_PICTURE_INTRA)
                {
                    cfg->first_frame_ok = TRUE;
                    cfg->starting_num_frames = cfg->vlib_stage_decoding_config->controller.num_frames;
                    LOGV(TAG, "Starting session");
                }
                result = C_OK;
            }
            if(cfg->first_frame_ok)
            {
                if (picture != NULL)
                {
                    uint8_t *y_buf = NULL, *cb_buf = NULL, *cr_buf = NULL;
                    int frameWidth = cfg->vlib_stage_decoding_config->controller.width;
                    int frameHeight = cfg->vlib_stage_decoding_config->controller.height;
                    int outFrameWidth = cfg->vlib_stage_decoding_config->picture->width;
                    int outFrameHeight = cfg->vlib_stage_decoding_config->picture->height;
                    /* Decoded frame smaller than the output picture: upscale
                     * each YUV420 plane 2x, cropping the excess. */
                    if(frameWidth < outFrameWidth &&
                       frameHeight < outFrameWidth)
                    {
                        /* Expand the Y buffer */
                        int w_cropping = ((frameWidth * 2) - outFrameWidth) / 2;
                        int h_cropping = ((frameHeight * 2) - outFrameHeight) / 2;
                        y_buf = vp_os_malloc( outFrameWidth * outFrameHeight);
                        encoder_expand_buffer_x2_yuv420p ( cfg->vlib_stage_decoding_config->picture->y_buf,
                                                           y_buf,
                                                           frameWidth - w_cropping,
                                                           frameHeight - h_cropping,
                                                           outFrameWidth);
                        cb_buf = vp_os_malloc( outFrameWidth * outFrameHeight / 4);
                        cr_buf = vp_os_malloc( outFrameWidth * outFrameHeight / 4);
                        /* Expand the U buffer */
                        encoder_expand_buffer_x2_yuv420p(cfg->vlib_stage_decoding_config->picture->cb_buf,
                                                         cb_buf,
                                                         (frameWidth - w_cropping) / 2,
                                                         (frameHeight - h_cropping) / 2,
                                                         outFrameWidth / 2);
                        /* Expand the V buffer */
                        encoder_expand_buffer_x2_yuv420p(cfg->vlib_stage_decoding_config->picture->cr_buf,
                                                         cr_buf,
                                                         (frameWidth - w_cropping) / 2,
                                                         (frameHeight - h_cropping) / 2,
                                                         outFrameWidth / 2);
                        picture->data[0] = picture->base[0] = y_buf;
                        picture->data[1] = picture->base[1] = cb_buf;
                        picture->data[2] = picture->base[2] = cr_buf;
                    } else {
                        /* Encode directly from the decoder's plane buffers. */
                        picture->data[0] = picture->base[0] = cfg->vlib_stage_decoding_config->picture->y_buf;
                        picture->data[1] = picture->base[1] = cfg->vlib_stage_decoding_config->picture->cb_buf;
                        picture->data[2] = picture->base[2] = cfg->vlib_stage_decoding_config->picture->cr_buf;
                    }
                    picture->linesize[0] = cfg->vlib_stage_decoding_config->picture->y_line_size;
                    picture->linesize[1] = cfg->vlib_stage_decoding_config->picture->cb_line_size;
                    picture->linesize[2] = cfg->vlib_stage_decoding_config->picture->cr_line_size;
                    write_video_frame(oc, video_st);
                    /* Temporary upscale buffers are no longer referenced once
                     * the frame has been encoded. */
                    if (y_buf) vp_os_free(y_buf);
                    if (cb_buf) vp_os_free(cb_buf);
                    if (cr_buf) vp_os_free(cr_buf);
                }
                result = C_OK;
            }
            cfg->previous_num_picture_decoded = cfg->vlib_stage_decoding_config->num_picture_decoded;
        }
    }
    /* Pass the buffers through unchanged to the next stage. */
    out->numBuffers = in->numBuffers;
    out->indexBuffer = in->indexBuffer;
    out->buffers = in->buffers;
    cfg->success = ((result == C_OK) && cfg->first_frame_ok);
    vp_os_mutex_unlock(&out->lock);
    return result;
}
/* Closes the output file and, when the transcode succeeded, appends an "ardt"
 * atom (drone metadata) to the end of the destination file. cfg->success is
 * cleared on any failure along the way. Always returns C_OK. */
C_RESULT encoder_stage_close(encoder_stage_config_t *cfg)
{
    close_video_file();
    const char *ardtFileName = NULL;
    const char *ardtData = NULL;
    FILE *ardtFile = NULL;
    movie_atom_t *ardtAtom = NULL;
    if(cfg->success)
    {
        ardtFileName = cfg->file_dest;
        ardtData = "This is just for test";
        if (NULL == ardtFileName || NULL == ardtData)
        {
            cfg->success = FALSE;
        }
    }
    if(cfg->success)
    {
        /* "ab": the atom is appended after the muxed mp4 data. */
        ardtFile = fopen(ardtFileName, "ab");
        if (NULL == ardtFile)
        {
            cfg->success = FALSE;
        }
    }
    if (cfg->success)
    {
        ardtAtom = ardtAtomFromPathAndDroneVersion(ardtData, 1);
        if (-1 == writeAtomToFile(&ardtAtom, ardtFile))
        {
            cfg->success = FALSE;
        }
        fclose (ardtFile);
    }
    if (cfg->success == TRUE) {
        LOGD(TAG, "Updated atom info [OK]");
    }
    LOGV(TAG, "Encoder stage close called");
    return C_OK;
}
/* Encoder thread: repeatedly asks the Java service for the next file to
 * transcode, runs it through a 3-stage pipeline (file reader -> vlib decoder
 * -> ffmpeg encoder stage), deletes the source on success and notifies the
 * service. 'data' is a global ref to the Java service object, released before
 * the thread exits. */
PROTO_THREAD_ROUTINE(encoder, data)
{
    JNIEnv* env = NULL;
    jobject service = data;
    if (g_vm) {
        (*g_vm)->AttachCurrentThread (g_vm, (JNIEnv **) &env, NULL);
    }
    LOGD(TAG, "Encoder thread started [OK]");
    C_RESULT res = C_FAIL;
    PIPELINE_HANDLE encoder_pipeline_handle;
    vp_api_io_pipeline_t pipeline;
    vp_api_io_data_t out;
    vp_api_io_stage_t stages[NB_STAGES];
    vp_api_picture_t picture;
    video_stage_io_file_config_t ifc;
    vlib_stage_decoding_config_t vec;
    encoder_stage_config_t qec;
    vp_os_thread_priority(vp_os_thread_self(), ENCODER_PRIORITY);
    vp_os_memset(&ifc, 0, sizeof( ifc ));
    vp_os_memset(&vec, 0, sizeof( vec ));
    vp_os_memset(&picture, 0, sizeof( picture ));
    /* BUGFIX: qec used to be left uninitialized, so encoder_stage_transform
     * read garbage from qec.first_frame_ok / qec.success on the first run. */
    vp_os_memset(&qec, 0, sizeof( qec ));
    /// Picture configuration
    picture.format = PIX_FMT_YUV420P;
    picture.width = QVGA_WIDTH;
    picture.height = QVGA_HEIGHT;
    picture.framerate = 20;
    picture.y_buf = vp_os_malloc( picture.width * picture.height );
    picture.cr_buf = vp_os_malloc( picture.width * picture.height / 4);
    picture.cb_buf = vp_os_malloc( picture.width * picture.height / 4);
    picture.y_line_size = picture.width;
    picture.cb_line_size = picture.width / 2;
    picture.cr_line_size = picture.width / 2;
    vec.width = picture.width;
    vec.height = picture.height;
    vec.picture = &picture;
    vec.luma_only = FALSE;
    vec.block_mode_enable = TRUE;
    qec.vlib_stage_decoding_config = &vec;
    ifc.filename = NULL;
    qec.file_src = NULL;
    /* Pipeline: file reader -> vlib decoder -> encoder stage. */
    pipeline.nb_stages = 0;
    stages[pipeline.nb_stages].type = VP_API_INPUT_BUFFER;
    stages[pipeline.nb_stages].cfg = (void *)&ifc;
    stages[pipeline.nb_stages++].funcs = video_stage_io_file_funcs;
    stages[pipeline.nb_stages].type = VP_API_FILTER_DECODER;
    stages[pipeline.nb_stages].cfg = (void*)&vec;
    stages[pipeline.nb_stages++].funcs = vlib_decoding_funcs;
    stages[pipeline.nb_stages].type = VP_API_FILTER_ENCODER;
    stages[pipeline.nb_stages].cfg = (void*)&qec;
    stages[pipeline.nb_stages++].funcs = encoder_stage_funcs;
    pipeline.stages = &stages[0];
    LOGD(TAG, "Encoder thread initialized [OK]");
    while( !ardrone_tool_exit() && encoder_sgate_stop_requested != TRUE )
    {
        if(encoder_stage_in_pause)
        {
            LOGD(TAG, "Pausing encoder thread.");
            vp_os_mutex_lock(&encoder_stage_mutex);
            vp_os_cond_wait(&encoder_stage_condition);
            vp_os_mutex_unlock(&encoder_stage_mutex);
            LOGD(TAG, "Encoder thread resumed.");
        }
        jstring filenameString = encoder_stage_get_next_file(env, service, "enc");
        if(filenameString)
        {
            /* Copy the Java string into locally owned buffers. */
            const char* filename = (*env)->GetStringUTFChars(env, filenameString, NULL);
            int len = strlen(filename);
            ifc.filename = vp_os_malloc((len + 1)*sizeof(char));
            strncpy(ifc.filename, filename, len + 1);
            LOGD(TAG, "Source file: %s", ifc.filename);
            (*env)->ReleaseStringUTFChars(env, filenameString, filename);
            ifc.filename[len] = '\0';
            qec.file_src = vp_os_malloc((len + 1) * sizeof(char));
            /* file_dest is filled in by encoder_stage_open (same length). */
            qec.file_dest = vp_os_malloc((len + 1) * sizeof(char));
            strncpy(qec.file_src, ifc.filename, len+1);
            res = vp_api_open(&pipeline, &encoder_pipeline_handle);
            if( SUCCEED(res) )
            {
                int thread_state = SUCCESS;
                out.status = VP_API_STATUS_INIT;
                while( !ardrone_tool_exit() && (thread_state == SUCCESS) && encoder_sgate_stop_requested != TRUE )
                {
                    if( SUCCEED(vp_api_run(&pipeline, &out)) )
                    {
                        if( (out.status == VP_API_STATUS_PROCESSING || out.status == VP_API_STATUS_STILL_RUNNING) )
                            thread_state = SUCCESS;
                        else if(out.status == VP_API_STATUS_ENDED)
                        {
                            LOGV(TAG, "Finished transcoding video %s with success", qec.file_src);
                            thread_state = SUCCESS;
                            break;
                        }
                    }
                    else
                    {
                        thread_state = -1; // Finish this thread
                        LOGV(TAG, "Finished transcoding video %s with error", qec.file_src);
                    }
                }
                vp_api_close(&pipeline, &encoder_pipeline_handle);
                if (thread_state == SUCCESS) {
                    if (remove(qec.file_src) == -1) {
                        LOGW(TAG, "Can't delete file %s", qec.file_src);
                    }
                    notify_media_ready(env, service, &qec);
                } else {
                    LOGW(TAG, "Error happened during video transcoding. Removing damaged file %s", qec.file_dest);
                    if (remove(qec.file_dest) == -1) {
                        /* BUGFIX: format was "%d" with a char* argument. */
                        LOGW(TAG, "Can't delete file %s", qec.file_dest);
                    }
                    encoder_sgate_stop_requested = TRUE;
                }
            }
            if (ifc.filename != NULL) {
                vp_os_free(ifc.filename);
                ifc.filename = NULL;
            }
            if (qec.file_src != NULL) {
                vp_os_free(qec.file_src);
                qec.file_src = NULL;
            }
            /* BUGFIX: this used to re-test qec.file_src (already NULL here),
             * so qec.file_dest leaked on every iteration. */
            if (qec.file_dest != NULL) {
                vp_os_free(qec.file_dest);
                qec.file_dest = NULL;
            }
        }
        else
        {
            /* No more files to transcode: leave the loop. */
            encoder_sgate_stop_requested = TRUE;
        }
    }
    vp_os_free(picture.y_buf);
    vp_os_free(picture.cb_buf);
    vp_os_free(picture.cr_buf);
    parrot_java_callbacks_call_void_method(env, service, "onTranscodingFinished");
    LOGV(TAG, "Encoder stage thread stopped.");
    (*env)->DeleteGlobalRef(env, service);
    if (g_vm) {
        (*g_vm)->DetachCurrentThread (g_vm);
    }
    return (THREAD_RET)0;
}
/*
* transcoding_service_stub.h
*
* Created on: Apr 6, 2012
* Author: "Dmytro Baryskyy"
*/
#ifndef TRANSCODING_SERVICE_STUB_H_
#define TRANSCODING_SERVICE_STUB_H_

/* Recording state driven by pipeline messages (see encoder_handle). */
typedef enum
{
    VIDEO_RECORD_HOLD, // Video recording is on hold, waiting for the start command. This is the default state.
    VIDEO_RECORD_START, // Video recording has started.
    VIDEO_PICTURE_START,
    VIDEO_PICTURE_HOLD,
    VIDEO_RECORD_STOP // Video recording has been stopped. Stage will end and restart.
} video_record_state;

/* Configuration/state shared with the encoder pipeline stage. */
typedef struct _encoder_stage_config_t_
{
    // Public
    char *file_src;                                        /* source file path (owned by the encoder thread) */
    char *file_dest;                                       /* destination path, derived in encoder_stage_open */
    video_record_state startRec;
    vlib_stage_decoding_config_t *vlib_stage_decoding_config; /* upstream decoder, source of frames */
    AVCodec *codec;
    AVFormatContext *oc;
    AVOutputFormat *fmt;
    AVStream *video_s;
    AVCodecContext *c;
    int i, out_size, size, x, y, outbuf_size;
    FILE *f;
    // Private
    uint32_t *numframes;
    int starting_num_frames;          /* frame counter at session start */
    int previous_num_picture_decoded; /* last decoder frame consumed */
    bool_t success;                   /* set by transform, read by close */
    bool_t first_frame_ok;            /* TRUE once an INTRA frame was seen */
    bool_t video_file_open;
} encoder_stage_config_t;

static C_RESULT create_video_file(const char*filename,int width,int height,int frame_rate, enum PixelFormat pix_fmt);
static void close_video_file();
static AVStream *add_video_stream(AVFormatContext *oc, enum CodecID codec_id,int width, int height, int frame_rate, enum PixelFormat pix_fmt);
static AVFrame *alloc_picture(enum PixelFormat pix_fmt, int width, int height);
static C_RESULT open_video(AVFormatContext *oc, AVStream *st);
static C_RESULT write_video_frame(AVFormatContext *oc, AVStream *st);
static void close_video(AVFormatContext *oc, AVStream *st);
PROTO_THREAD_ROUTINE(encoder, data);
///////////////////////////////////////////////
// FUNCTIONS
C_RESULT
encoder_handle (encoder_stage_config_t * cfg, PIPELINE_MSG msg_id, void *callback, void *param);
/**
 * @fn Open the quicktime encoder stage
 * @param quicktime_encoder_stage_config_t *cfg
 * @return VP_SUCCESS
 */
C_RESULT
encoder_stage_open(encoder_stage_config_t *cfg);
/**
 * @fn Transform the quicktime encoder stage
 * @param quicktime_encoder_stage_config_t *cfg
 * @param vp_api_io_data_t *in
 * @param vp_api_io_data_t *out
 * @return VP_SUCCESS
 */
C_RESULT
encoder_stage_transform(encoder_stage_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out);
/**
 * @fn Close the quicktime encoder stage
 * @param quicktime_encoder_stage_config_t *cfg
 * @return VP_SUCCESS
 */
C_RESULT
encoder_stage_close(encoder_stage_config_t *cfg);
extern const vp_api_stage_funcs_t encoder_stage_funcs;
#endif /* TRANSCODING_SERVICE_STUB_H_ */
/*
* frame_rate.c
*
* Created on: May 20, 2011
* Author: "Dmytro Baryskyy"
*/
#include <time.h>
#include "common.h"
#include "frame_rate.h"
// FPS is computed over a window of FRAMES_COUNT rendered frames (currently 240;
// the original comment said 30, which no longer matched the constant).
static const int FRAMES_COUNT = 240;
//Used to calculate frames count
static int renderedFramesCount = 0;
//Used to calculate render time (start of the current measurement window)
static time_t start = 0;
static float fps;
/* Resets the frame counter and restarts the measurement window. */
void parrot_frame_rate_init()
{
    start = time(NULL);
    renderedFramesCount = 0;
}
/* Call once per rendered frame. Every FRAMES_COUNT frames the average FPS over
 * the elapsed wall-clock window is logged and the window restarts. */
void parrot_frame_rate_on_draw_completed()
{
    renderedFramesCount += 1;
    if (renderedFramesCount > FRAMES_COUNT) {
        time_t elapsed = time(NULL) - start;
        /* BUGFIX: when all frames arrived within the same second the old code
         * divided by zero and logged inf; skip the log in that case. */
        if (elapsed > 0) {
            fps = (float)renderedFramesCount / (float)elapsed;
            LOGI("FRAME_RATE", "FPS: %.2f", fps);
        }
        renderedFramesCount = 0;
        start = time(NULL);
    }
}
/*
* frame_rate.h
*
* Created on: May 20, 2011
* Author: "Dmytro Baryskyy"
*/
#ifndef FRAME_RATE_H_
#define FRAME_RATE_H_
/* Reset the FPS counter and start a new measurement window. */
extern void parrot_frame_rate_init();
/* Call once per rendered frame; logs the FPS every FRAMES_COUNT frames. */
extern void parrot_frame_rate_on_draw_completed();
#endif /* FRAME_RATE_H_ */
//
// Shaders.m
// FreeFlight
//
// Created by Frédéric D'HAEYER on 24/10/11.
// Copyright 2011 PARROT. All rights reserved.
//
#include "opengl_shader.h"
#include "common.h"
/* Create and compile a shader from the provided source(s) */
/* Creates a shader of 'type' from the NUL-terminated source 'content_file',
 * stores the handle in *shader and returns the GL_COMPILE_STATUS (GL_TRUE on
 * success, GL_FALSE on failure, 0 when the source is NULL).
 * NOTE(review): the 'count' parameter is unused — glShaderSource is called
 * with a hard-coded count of 1. */
GLint opengl_shader_compile(GLuint *shader, GLenum type, GLsizei count, const char* content_file)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
    GLint status;
    const GLchar *sources = (const GLchar *)content_file;
    // get source code
    if (!sources)
    {
        printf("Failed to load vertex shader\n");
        return 0;
    }
    *shader = glCreateShader(type); // create shader
    glShaderSource(*shader, 1, &sources, NULL); // set source code in the shader
    glCompileShader(*shader); // compile shader
#if defined(DEBUG_SHADER)
    GLint logLength;
    glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength);
    if (logLength > 0)
    {
        GLchar *log = (GLchar *)vp_os_malloc(logLength);
        glGetShaderInfoLog(*shader, logLength, &logLength, log);
        printf("Shader compile log:\n%s\n", log);
        vp_os_free(log);
    }
#endif
    glGetShaderiv(*shader, GL_COMPILE_STATUS, &status);
    if (status == GL_FALSE)
    {
        printf("Failed to compile shader:\n");
        printf("%s\n", sources);
    }
    return status;
}
/* Link a program with all currently attached shaders */
/*
 * Link a program with all currently attached shaders.
 * Returns the GL_LINK_STATUS value (GL_TRUE on success); prints a
 * diagnostic when linking fails.
 */
GLint opengl_shader_link(GLuint prog)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
    glLinkProgram(prog);
#if defined(DEBUG_SHADER)
    GLint infoLen;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &infoLen);
    if (infoLen > 0)
    {
        GLchar *buffer = (GLchar *)vp_os_malloc(infoLen);
        glGetProgramInfoLog(prog, infoLen, &infoLen, buffer);
        printf("Program link log:\n%s\n", buffer);
        vp_os_free(buffer);
    }
#endif
    GLint linkStatus;
    glGetProgramiv(prog, GL_LINK_STATUS, &linkStatus);
    if (GL_FALSE == linkStatus)
    {
        printf("Failed to link program %d\n", prog);
    }
    return linkStatus;
}
/* Validate a program (for i.e. inconsistent samplers) */
/*
 * Validate a program (catches e.g. inconsistent samplers).  Always prints
 * the validation log when one is available.
 * Returns the GL_VALIDATE_STATUS value (GL_TRUE on success).
 */
GLint opengl_shader_validate(GLuint prog)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
    glValidateProgram(prog);
    GLint infoLen;
    glGetProgramiv(prog, GL_INFO_LOG_LENGTH, &infoLen);
    if (infoLen > 0)
    {
        GLchar *buffer = (GLchar *)vp_os_malloc(infoLen);
        glGetProgramInfoLog(prog, infoLen, &infoLen, buffer);
        printf("Program validate log:\n%s\n", buffer);
        vp_os_free(buffer);
    }
    GLint validateStatus;
    glGetProgramiv(prog, GL_VALIDATE_STATUS, &validateStatus);
    if (GL_FALSE == validateStatus)
    {
        printf("Failed to validate program %d\n", prog);
    }
    return validateStatus;
}
/* delete shader resources */
/*
 * Delete the given shader and program objects.  A zero name is skipped.
 * NOTE: parameters are passed by value, so the original "vertShader = 0"
 * style assignments only modified local copies; they were dead stores and
 * have been removed.
 */
void opengl_shader_destroy(GLuint vertShader, GLuint fragShader, GLuint prog)
{
#if defined(DEBUG_SHADER)
    printf("%s : %d\n", __FUNCTION__, __LINE__);
#endif
    if (vertShader) {
        glDeleteShader(vertShader);
    }
    if (fragShader) {
        glDeleteShader(fragShader);
    }
    if (prog) {
        glDeleteProgram(prog);
    }
}
//
// Shaders.h
// FreeFlight
//
// Created by Frédéric D'HAEYER on 24/10/11.
// Copyright 2011 PARROT. All rights reserved.
//
#ifndef _OPENGL_SHADER_H_
#define _OPENGL_SHADER_H_
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
// #define DEBUG_SHADER
/* Shader Utilities */
GLint opengl_shader_compile(GLuint *shader, GLenum type, GLsizei count, const char *content_file);
GLint opengl_shader_link(GLuint prog);
GLint opengl_shader_validate(GLuint prog);
void opengl_shader_destroy(GLuint vertShader, GLuint fragShader, GLuint prog);
#endif // _OPENGL_SHADER_H_
/*
* opengl_stage.c
* Test
*
* Created by Karl Leplat on 22/02/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#include "opengl_stage.h"
#include "time.h"
// Exponentially-smoothed render FPS, updated by opengl_video_stage_transform().
float DEBUG_fps = 0.0;
// Shared video-stage configuration, defined in app.c.
extern opengl_video_stage_config_t ovsc;
// VP_API stage vtable: no message handler; open/transform/close implemented below.
const vp_api_stage_funcs_t opengl_video_stage_funcs = {
    (vp_api_stage_handle_msg_t) NULL,
    (vp_api_stage_open_t) opengl_video_stage_open,
    (vp_api_stage_transform_t) opengl_video_stage_transform,
    (vp_api_stage_close_t) opengl_video_stage_close
};
/* Stage open callback: prepare the mutex guarding cfg's shared fields.
 * Always succeeds. */
C_RESULT opengl_video_stage_open(opengl_video_stage_config_t *cfg)
{
    C_RESULT result = C_OK;
    vp_os_mutex_init(&cfg->mutex);
    return result;
}
/*
 * Stage transform callback: invoked once per frame coming out of the
 * decoder.  Maintains a smoothed FPS estimate, then (under cfg->mutex)
 * publishes the latest decoded picture's geometry, pixel format and buffer
 * pointer into cfg for the GL renderer to consume, and forwards the input
 * buffers to the output unchanged.  Always returns C_OK.
 */
C_RESULT opengl_video_stage_transform(opengl_video_stage_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
    /* FPS bookkeeping: every CALCULATE_EVERY_X_FRAMES calls, fold the
     * elapsed time into DEBUG_fps with an exponential moving average. */
    static struct timeval tvPrev = {0, 0}, tvNow = {0, 0};
    static int nbFramesForCalc = 1;
#define CALCULATE_EVERY_X_FRAMES 10
    if (0 == --nbFramesForCalc)
    {
        nbFramesForCalc = CALCULATE_EVERY_X_FRAMES;
        tvPrev.tv_sec = tvNow.tv_sec;
        tvPrev.tv_usec = tvNow.tv_usec;
        gettimeofday(&tvNow, NULL);
        if (0 != tvPrev.tv_sec) // Avoid first time calculation
        {
            float timeDiffMillis = ((tvNow.tv_sec - tvPrev.tv_sec) * 1000.0) + ((tvNow.tv_usec - tvPrev.tv_usec) / 1000.0);
            // 0.9 old / 0.1 new smoothing.
            DEBUG_fps = (0.9 * DEBUG_fps) + (0.1 * ((1000.0 * CALCULATE_EVERY_X_FRAMES) / timeDiffMillis));
        }
    }
    vp_os_mutex_lock( &out->lock );
    if(out->status == VP_API_STATUS_INIT)
    {
        out->status = VP_API_STATUS_PROCESSING;
    }
    if(out->status == VP_API_STATUS_PROCESSING )
    {
        vp_os_mutex_lock( &cfg->mutex );
        // Only publish when the decoder has produced a frame we have not seen.
        if(cfg->video_decoder->num_picture_decoded > cfg->num_picture_decoded)
        {
            // Source picture = decoded image size; destination = texture size.
            cfg->widthImage = cfg->video_decoder->src_picture->width;
            cfg->heightImage = cfg->video_decoder->src_picture->height;
            cfg->widthTexture = cfg->video_decoder->dst_picture->width;
            cfg->heightTexture = cfg->video_decoder->dst_picture->height;
            // Map the decoder's pixel format to GL upload parameters.
            switch(cfg->video_decoder->dst_picture->format)
            {
            case PIX_FMT_RGB565:
                cfg->bytesPerPixel = 2;
                cfg->format = GL_RGB;
                cfg->type = GL_UNSIGNED_SHORT_5_6_5;
                break;
            case PIX_FMT_RGB24:
                cfg->bytesPerPixel = 3;
                cfg->format = GL_RGB;
                cfg->type = GL_UNSIGNED_BYTE;
                break;
            default:
                cfg->bytesPerPixel = 4;
                cfg->format = GL_RGBA;
                cfg->type = GL_UNSIGNED_BYTE;
                break;
            }
            // Point cfg->data at the current input buffer (no copy).
            if(cfg->data != in->buffers[in->indexBuffer])
                cfg->data = in->buffers[in->indexBuffer];
            cfg->num_picture_decoded = cfg->video_decoder->num_picture_decoded;
            // Pass the input buffers through to the next stage unchanged.
            out->numBuffers = in->numBuffers;
            out->indexBuffer = in->indexBuffer;
            out->buffers = in->buffers;
            out->size = in->size;
        }
        vp_os_mutex_unlock( &cfg->mutex );
    }
    vp_os_mutex_unlock( &out->lock );
    return C_OK;
}
/* Stage close callback: tear down the mutex created in the open callback.
 * Always succeeds. */
C_RESULT opengl_video_stage_close(opengl_video_stage_config_t *cfg)
{
    C_RESULT result = C_OK;
    vp_os_mutex_destroy(&cfg->mutex);
    return result;
}
/* Accessor for the shared video-stage configuration singleton (ovsc). */
opengl_video_stage_config_t* opengl_video_stage_get(void)
{
    opengl_video_stage_config_t *config = &ovsc;
    return config;
}
/*
* opengl_stage.h
* Test
*
* Created by Karl Leplat on 22/02/10.
* Copyright 2010 Parrot SA. All rights reserved.
*
*/
#ifndef _OPENGL_STAGE_H_
#define _OPENGL_STAGE_H_
#include "common.h"
// Shared state between the video pipeline (producer, opengl_video_stage_transform)
// and the GL renderer (consumer); the transform stage updates it under `mutex`.
typedef struct _opengl_video_config_t
{
    video_decoder_config_t *video_decoder; // decoder stage supplying src/dst pictures
    vp_os_mutex_t mutex;                   // guards the fields written by the transform stage
    GLuint widthImage;    // decoded picture width  (src_picture)
    GLuint heightImage;   // decoded picture height (src_picture)
    GLuint widthTexture;  // GL texture width  (dst_picture)
    GLuint heightTexture; // GL texture height (dst_picture)
    GLfloat scaleModelX;   // NOTE(review): not written in the visible code --
    GLfloat scaleModelY;   // presumably filled in by the GL renderer; confirm.
    GLfloat scaleTextureX;
    GLfloat scaleTextureY;
    GLuint bytesPerPixel; // 2/3/4 depending on dst_picture->format
    GLenum format;        // GL upload format: GL_RGB or GL_RGBA
    GLenum type;          // GL upload type: GL_UNSIGNED_SHORT_5_6_5 or GL_UNSIGNED_BYTE
    void* data;           // latest decoded frame buffer (aliases the pipeline buffer)
    GLuint identifier;    // NOTE(review): not written in the visible code
    uint32_t num_picture_decoded; // last decoder frame number published
    uint32_t num_frames;          // NOTE(review): not written in the visible code
} opengl_video_stage_config_t;
C_RESULT opengl_video_stage_open(opengl_video_stage_config_t *cfg);
C_RESULT opengl_video_stage_transform(opengl_video_stage_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out);
C_RESULT opengl_video_stage_close(opengl_video_stage_config_t *cfg);
opengl_video_stage_config_t* opengl_video_stage_get(void);
extern const vp_api_stage_funcs_t opengl_video_stage_funcs;
#endif // _OPENGL_STAGE_H_
/*
* video_stage_renderer.c
*
* Created on: May 18, 2011
* Author: "Dmytro Baryskyy"
*/
#include <android/bitmap.h>
#include "common.h"
#include "frame_rate.h"
#include "video_stage_renderer.h"
#include "../Callbacks/java_callbacks.h"
static const char* TAG = "video_stage_renderer";
// Current screen dimensions (not used in the visible code).
static opengl_size_t screen_size;
int video_width = 0;
int video_height = 0;
// Set by parrot_video_stage_renderer_invalidate() to request a texture rebuild.
static bool_t recalculate_video_texture = FALSE;
// Holds video data
static uint8_t *pixbuff = NULL;
opengl_scaling scaling;
opengl_texture_t texture;
//GLuint program;
// Frame counters (not updated in the visible code).
static int32_t current_num_picture_decoded = 0;
static int32_t current_num_frames = 0;
opengl_size_t oldsize;
/* Log one GL string property (e.g. GL_VENDOR/GL_RENDERER) via LOGI. */
static void printGLString(const char *name, GLenum s)
{
    const char *value = (const char *) glGetString(s);
    LOGI(TAG, "GL %s = %s\n", name, value);
}
/* Request that the video texture parameters be recalculated on the next
 * render pass (sets the module-level recalculate_video_texture flag). */
void parrot_video_stage_renderer_invalidate()
{
    recalculate_video_texture = TRUE;
}
/* Intentional no-op: OpenGL setup happens on the Java side. */
void parrot_video_stage_init()
{
    // Left empty as OpenGL drawing is performed on java side
}
/* Intentional no-op: OpenGL teardown happens on the Java side. */
void parrot_video_stage_deinit()
{
    // Left empty as OpenGL drawing is performed on java side
}
/*
 * JNI stub: always reports that no new video frame is available.
 * NOTE(review): bitmap/videoSize are ignored; rendering is performed on the
 * Java side, so this native path appears intentionally disabled -- confirm.
 */
JNIEXPORT jboolean JNICALL
Java_com_parrot_freeflight_video_VideoStageRenderer_getVideoFrameNative(JNIEnv *env, jobject obj, jobject bitmap, jintArray videoSize)
{
    return FALSE;
}
/*
* video_stage_renderer.h
*
* Created on: May 18, 2011
* Author: Dmytro Baryskyy
*/
#ifndef VIDEO_STAGE_RENDERER_H_
#define VIDEO_STAGE_RENDERER_H_
void parrot_video_stage_renderer_invalidate();
#endif /* VIDEO_STAGE_RENDERER_H_ */
/*
* app.c
*
* Created on: May 4, 2011
* Author: Dmytro Baryskyy
*/
#include <ardrone_tool/Academy/academy.h>
#include <VP_Os/vp_os_thread.h>
#include "common.h"
#include "ControlData.h"
#include "Controller/virtual_gamepad.h"
#include "app.h"
#ifndef STREAM_WIDTH
#define STREAM_WIDTH (hdtv360P_WIDTH)
#endif
#ifndef STREAM_HEIGHT
#define STREAM_HEIGHT (hdtv360P_HEIGHT)
#endif
extern ControlData ctrldata;
//#define DEBUG_THREAD 1
// Main-loop run flag: cleared by parrot_ardrone_notify_exit() or on init failure.
static bool_t bContinue = TRUE;
// Drone address/version info, filled in by app_main.
ardrone_info_t ardrone_info = { 0 };
// Latency-estimation stage config (first post-processing stage).
vp_stages_latency_estimation_config_t vlat;
// OpenGL video stage config (second post-processing stage), shared with opengl_stage.c.
opengl_video_stage_config_t ovsc;
static const char* TAG = "APP";
char drone_address[16];
// Cached Java VM pointer, set in JNI_OnLoad, used by threads to attach.
JavaVM* g_vm = NULL;
// Worker threads managed by the SDK thread helper, with their priorities.
BEGIN_THREAD_TABLE
THREAD_TABLE_ENTRY(app_main, AT_THREAD_PRIORITY)
THREAD_TABLE_ENTRY(ardrone_control, NAVDATA_THREAD_PRIORITY)
THREAD_TABLE_ENTRY(navdata_update, NAVDATA_THREAD_PRIORITY)
THREAD_TABLE_ENTRY(video_stage, VIDEO_THREAD_PRIORITY)
THREAD_TABLE_ENTRY(video_recorder, VIDEO_RECORDER_THREAD_PRIORITY)
END_THREAD_TABLE
/* Invoked by the JVM when the native library is loaded.  Caches the JavaVM
 * pointer so worker threads can attach later, and reports JNI 1.6. */
JNIEXPORT jint JNICALL
JNI_OnLoad(JavaVM *vm, void *reserved)
{
    g_vm = vm; // keep the VM handle for AttachCurrentThread calls
    LOGI(TAG, "Library has been loaded");
    return JNI_VERSION_1_6;
}
/*
 * Main engine thread.  Waits for the wifi interface, resolves the drone's
 * IP address and firmware version, builds the video pipeline (latency
 * estimation + OpenGL output stages), starts the video/recorder threads and
 * runs the ardrone tool loop until bContinue is cleared, then tears down.
 *
 * `data` is a mobile_main_param_t* allocated by parrot_ardrone_notify_start;
 * this thread owns it (including the Java global ref param->obj) and must
 * release both before exiting.
 *
 * BUGFIXES vs original:
 *  - the early "disconnected while waiting for wifi" return leaked `param`
 *    and its Java global ref and never detached the thread from the VM;
 *  - the normal exit path called vp_os_free(data) and THEN dereferenced
 *    param->obj (param == data): use-after-free.  The global ref is now
 *    deleted before the free.
 */
DEFINE_THREAD_ROUTINE(app_main, data)
{
    LOGI(TAG, "app_main thread started [OK]" );
    C_RESULT res = C_FAIL;
    vp_com_wifi_config_t* config = NULL;
    JNIEnv* env = NULL;
    if (g_vm) {
        (*g_vm)->AttachCurrentThread (g_vm, (JNIEnv **) &env, NULL);
    }
    bContinue = TRUE;
    mobile_main_param_t *param = data;
    video_recorder_thread_param_t video_recorder_param;
    video_recorder_param.priority = VIDEO_RECORDER_THREAD_PRIORITY;
    video_recorder_param.finish_callback = param->academy_download_callback_func;
    vp_os_memset(&ardrone_info, 0x0, sizeof(ardrone_info_t));
    // Wait for the wifi interface to come up; bail out if asked to exit.
    while ((config = (vp_com_wifi_config_t *)wifi_config()) != NULL && strlen(config->itfName) == 0)
    {
        //Waiting for wifi initialization
        vp_os_delay(250);
        if (ardrone_tool_exit() == TRUE) {
            if (param != NULL && param->callback != NULL) {
                param->callback(env, param->obj, ARDRONE_MESSAGE_DISCONNECTED);
            }
            /* BUGFIX: release resources on this early-exit path too. */
            if (env != NULL && param != NULL) {
                (*env)->DeleteGlobalRef(env, param->obj);
            }
            vp_os_free (data);
            if (g_vm) {
                (*g_vm)->DetachCurrentThread (g_vm);
            }
            return 0;
        }
    }
    LOGD(TAG, "WIFI is available. Trying to get AR.Drone IP address..." );
    vp_os_memcpy(&ardrone_info.drone_address[0], config->server, strlen(config->server));
    LOGI(TAG, "AR.Drone IP address: %s", ardrone_info.drone_address);
    // Poll the drone for its firmware version until it answers.
    while (-1 == getDroneVersion (param->root_dir, &ardrone_info.drone_address[0], &ardroneVersion))
    {
        LOGD (TAG, "Getting AR.Drone version");
        vp_os_delay (250);
    }
    sprintf(&ardrone_info.drone_version[0], "%u.%u.%u", ardroneVersion.majorVersion, ardroneVersion.minorVersion, ardroneVersion.revision);
    LOGD (TAG, "ARDrone Version : %s\n", &ardrone_info.drone_version[0]);
    LOGI(TAG, "Drone Family: %d", ARDRONE_VERSION());
    res = ardrone_tool_setup_com( NULL );
    if( FAILED(res) )
    {
        LOGW(TAG, "Setup com failed");
        LOGW(TAG, "Wifi initialization failed. It means either:");
        LOGW(TAG, "\t* you're not root (it's mandatory because you can set up wifi connection only as root)\n");
        LOGW(TAG, "\t* wifi device is not present (on your pc or on your card)\n");
        LOGW(TAG, "\t* you set the wrong name for wifi interface (for example rausb0 instead of wlan0) \n");
        LOGW(TAG, "\t* ap is not up (reboot card or remove wifi usb dongle)\n");
        LOGW(TAG, "\t* wifi device has no antenna\n");
        if (param != NULL && param->callback != NULL) {
            param->callback(env, param->obj, ARDRONE_MESSAGE_ERR_NO_WIFI);
        }
    }
    else
    {
        LOGD(TAG, "ardrone_tool_setup_com [OK]");
#define NB_IPHONE_PRE_STAGES 0
#define NB_IPHONE_POST_STAGES 2
        // Allocate the pipeline descriptors and the in/out picture buffers.
        specific_parameters_t * params = (specific_parameters_t *)vp_os_calloc(1, sizeof(specific_parameters_t));
        specific_stages_t * iphone_pre_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
        specific_stages_t * iphone_post_stages = (specific_stages_t*)vp_os_calloc(1, sizeof(specific_stages_t));
        vp_api_picture_t * in_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
        vp_api_picture_t * out_picture = (vp_api_picture_t*) vp_os_calloc(1, sizeof(vp_api_picture_t));
        in_picture->width = STREAM_WIDTH;
        in_picture->height = STREAM_HEIGHT;
        // RGB565 output: 2 bytes per pixel.
        out_picture->framerate = 20;
        out_picture->format = PIX_FMT_RGB565;
        out_picture->width = STREAM_WIDTH;
        out_picture->height = STREAM_HEIGHT;
        out_picture->y_buf = vp_os_malloc( STREAM_WIDTH * STREAM_HEIGHT * 2 );
        out_picture->cr_buf = NULL;
        out_picture->cb_buf = NULL;
        out_picture->y_line_size = STREAM_WIDTH * 2;
        out_picture->cb_line_size = 0;
        out_picture->cr_line_size = 0;
        //Define the list of stages size
        iphone_pre_stages->length = NB_IPHONE_PRE_STAGES;
        iphone_post_stages->length = NB_IPHONE_POST_STAGES;
        //Alloc the lists
        iphone_pre_stages->stages_list = NULL;
        iphone_post_stages->stages_list = (vp_api_io_stage_t*)vp_os_calloc(iphone_post_stages->length,sizeof(vp_api_io_stage_t));
        //Fill the POST-stages------------------------------------------------------
        int postStageNumber = 0;
        // Stage 0: latency estimation filter.
        vp_os_memset (&vlat, 0x0, sizeof (vlat));
        vlat.state = 0;
        vlat.last_decoded_frame_info= (void *)&vec;
        iphone_post_stages->stages_list[postStageNumber].type = VP_API_FILTER_DECODER;
        iphone_post_stages->stages_list[postStageNumber].cfg = (void *)&vlat;
        iphone_post_stages->stages_list[postStageNumber++].funcs = vp_stages_latency_estimation_funcs;
        // Stage 1: OpenGL output stage (consumed by the Java renderer).
        vp_os_memset (&ovsc, 0x0, sizeof (ovsc));
        ovsc.video_decoder = &vec;
        iphone_post_stages->stages_list[postStageNumber].type = VP_API_OUTPUT_LCD;
        iphone_post_stages->stages_list[postStageNumber].cfg = (void *)&ovsc;
        iphone_post_stages->stages_list[postStageNumber++].funcs = opengl_video_stage_funcs;
        params->in_pic = in_picture;
        params->out_pic = out_picture;
        params->pre_processing_stages_list = iphone_pre_stages;
        params->post_processing_stages_list = iphone_post_stages;
#if USE_THREAD_PRIORITIES
        params->needSetPriority = 1;
        params->priority = VIDEO_THREAD_PRIORITY;
#else
        params->needSetPriority = 0;
        params->priority = 0;
#endif
        START_THREAD(video_stage, params);
        if (IS_LEAST_ARDRONE2)
        {
            START_THREAD (video_recorder, (void *)&video_recorder_param);
            LOGD(TAG, "Video recorder thread start [OK]");
        }
        LOGI(TAG, "Processing ardrone_tool_init. App name: %s, UserName: %s", param->app_name, param->user_name);
        LOGI(TAG, "Root Dir: %s", param->root_dir);
        LOGI(TAG, "Flight Dir: %s, Flight size: %d", param->flight_dir, param->flight_storing_size);
        res = ardrone_tool_init(&ardrone_info.drone_address[0], strlen(&ardrone_info.drone_address[0]), NULL, param->app_name, param->user_name, param->root_dir, param->flight_dir, param->flight_storing_size, param->academy_download_callback_func);
        if(SUCCEED(res))
        {
            LOGD(TAG, "AR.Drone tool initialization [OK]");
            ardrone_tool_input_add(&virtual_gamepad);
            LOGD(TAG, "Virtual gamepad has been added");
            if (param != NULL && param->callback != NULL) {
                param->callback(env, param->obj, ARDRONE_MESSAGE_CONNECTED_OK);
            }
        } else {
            if (param != NULL && param->callback != NULL) {
                param->callback(env, param->obj, ARDRONE_MESSAGE_UNKNOWN_ERR);
            }
            LOGE(TAG, "AR.Drone tool initialization [FAILED]");
            bContinue = FALSE;
        }
        res = ardrone_tool_set_refresh_time(1000 / kAPS);
#if USE_THREAD_PRIORITIES
        CHANGE_THREAD_PRIO (app_main, AT_THREAD_PRIORITY);
        CHANGE_THREAD_PRIO (navdata_update, NAVDATA_THREAD_PRIORITY);
        CHANGE_THREAD_PRIO (ardrone_control, NAVDATA_THREAD_PRIORITY);
#endif
        // Main event loop: runs until disconnect/exit clears bContinue.
        while( SUCCEED(res) && bContinue == TRUE )
        {
            ardrone_tool_update();
        }
        JOIN_THREAD(video_stage);
        LOGD(TAG, "Video stage thread stopped [OK]");
        if (IS_LEAST_ARDRONE2)
        {
            JOIN_THREAD (video_recorder);
            LOGD(TAG, "Video recorder thread stopped [OK]");
        }
        /* Unregistering for the current device */
        ardrone_tool_input_remove( &virtual_gamepad );
        res = ardrone_tool_shutdown();
        LOGD(TAG, "AR.Drone tool shutdown [OK]");
        if (param != NULL && param->callback != NULL) {
            param->callback(env, param->obj, ARDRONE_MESSAGE_DISCONNECTED);
        }
    }
    /* BUGFIX: delete the Java global ref BEFORE freeing `data` -- the
     * original freed data (== param) and then read param->obj. */
    if (env != NULL && param != NULL) {
        (*env)->DeleteGlobalRef(env, param->obj);
    }
    vp_os_free (data);
    data = NULL;
    param = NULL;
    if (g_vm) {
        (*g_vm)->DetachCurrentThread (g_vm);
    }
    LOGI(TAG, "app_main thread has been stopped.");
    return (THREAD_RET) res;
}
/* Invoked by the JVM when the native library is unloaded; drops the cached
 * VM handle so no further thread attaches are attempted. */
JNIEXPORT void JNICALL
JNI_OnUnload(JavaVM *vm, void *reserved)
{
    LOGI(TAG, "Library has been unloaded");
    g_vm = NULL;
}
/*
 * Entry point called from the Java layer to start the engine.  Initializes
 * the video pipelines, copies the Java-supplied strings/settings into a
 * heap-allocated mobile_main_param_t (owned by the app_main thread, which
 * frees it) and launches the app_main thread.
 *
 * BUGFIX: the original strncpy calls used the full STRING_BUFFER_LENGTH,
 * which leaves the destination unterminated when the source is >= 512
 * characters.  Copy at most LENGTH-1 bytes; the preceding memset guarantees
 * a trailing NUL.
 */
void parrot_ardrone_notify_start(JNIEnv* env,
        jobject obj,
        ardroneEngineCallback callback,
        const char *appName,
        const char *userName,
        const char* rootdir,
        const char* flightdir,
        int flight_storing_size,
        academy_download_new_media academy_download_callback_func,
        VIDEO_RECORDING_CAPABILITY recordingCapability)
{
    video_stage_init();
    video_recorder_init();
    mobile_main_param_t *param = vp_os_malloc(sizeof(mobile_main_param_t));
    if (NULL != param) {
        // Global ref keeps the Java object alive for the app_main thread;
        // app_main deletes it before exiting.
        param->obj = (*env)->NewGlobalRef(env, obj);
        param->callback = callback;
        vp_os_memset(&param->app_name, 0, STRING_BUFFER_LENGTH);
        vp_os_memset(&param->user_name, 0, STRING_BUFFER_LENGTH);
        vp_os_memset(&param->root_dir, 0, STRING_BUFFER_LENGTH);
        vp_os_memset(&param->flight_dir, 0, STRING_BUFFER_LENGTH);
        strncpy(param->app_name, appName, STRING_BUFFER_LENGTH - 1);
        strncpy(param->user_name, userName, STRING_BUFFER_LENGTH - 1);
        strncpy(param->root_dir, rootdir, STRING_BUFFER_LENGTH - 1);
        strncpy(param->flight_dir, flightdir, STRING_BUFFER_LENGTH - 1);
        param->flight_storing_size = flight_storing_size;
        param->academy_download_callback_func = academy_download_callback_func;
        ctrldata.recordingCapability = recordingCapability;
        START_THREAD(app_main, param);
    }
}
/*
 * Shut the engine down: suspend the pipelines (via notify_pause), clear the
 * main-loop flag so app_main's event loop exits, then shut down the tool.
 */
void parrot_ardrone_notify_exit()
{
    parrot_ardrone_notify_pause();
    bContinue = FALSE;
    ardrone_tool_shutdown();
    LOGD(TAG, "AR.Drone Tool Stop [OK]");
}
/*
 * Suspend the engine (e.g. when the app goes to background): pause the
 * video stage, the recorder (AR.Drone 2 only), then the ardrone tool.
 */
void parrot_ardrone_notify_pause()
{
    video_stage_suspend_thread();
    if (IS_LEAST_ARDRONE2)
    {
        video_recorder_suspend_thread();
    }
    ardrone_tool_suspend();
    LOGD(TAG, "AR.Drone Tool Pause [OK]");
}
/*
 * Resume the engine after a pause: mirror of parrot_ardrone_notify_pause().
 */
void parrot_ardrone_notify_resume()
{
    video_stage_resume_thread();
    if (IS_LEAST_ARDRONE2)
    {
        video_recorder_resume_thread();
    }
    ardrone_tool_resume();
    LOGD(TAG, "AR.Drone Tool Resume [OK]");
}
/* SDK hook: no custom per-frame user-input handling is needed here
 * (the virtual gamepad drives input); always succeeds. */
C_RESULT custom_update_user_input(input_state_t* input_state, uint32_t user_input)
{
    return C_OK;
}
/* SDK hook: nothing to reset for user input; always succeeds. */
C_RESULT custom_reset_user_input(input_state_t* input_state, uint32_t user_input)
{
    return C_OK;
}
/* Event-loop exit predicate: reports TRUE once bContinue has been cleared
 * (by parrot_ardrone_notify_exit() or an initialization failure). */
bool_t ardrone_tool_exit()
{
    if (bContinue == FALSE)
    {
        return TRUE;
    }
    return FALSE;
}
/* SDK hook: no custom display step on Android (rendering is Java-side);
 * always succeeds. */
C_RESULT ardrone_tool_display_custom()
{
    return C_OK;
}
/*
* app.h
*
* Created on: May 4, 2011
* Author: Dmytro Baryskyy
*/
#ifndef APP_H_
#define APP_H_
#include <VP_Api/vp_api_thread_helper.h>
#include <academy_common.h>
// Size of the fixed string buffers in mobile_main_param_t.
#define STRING_BUFFER_LENGTH 512
// Put 1 if you want to set thread priorities (else put 0)
#define USE_THREAD_PRIORITIES (1)
/**
 * Priorities for each "rt" threads
 * Must be between 15 and 43
 * Higher means more priority
 */
// NOTE(review): AT_THREAD_PRIORITY is 47, outside the 15-43 range documented
// just above -- confirm which of the two is correct.
#define AT_THREAD_PRIORITY (47)
#define VIDEO_THREAD_PRIORITY (31)
#define NAVDATA_THREAD_PRIORITY (31)
#define VIDEO_RECORDER_THREAD_PRIORITY (15)
PROTO_THREAD_ROUTINE(app_main, data);
// Connection-state messages delivered to the Java layer via ardroneEngineCallback.
typedef enum {
    ARDRONE_MESSAGE_UNKNOWN_ERR = -1,
    ARDRONE_MESSAGE_CONNECTED_OK,
    ARDRONE_MESSAGE_DISCONNECTED,
    ARDRONE_MESSAGE_ERR_NO_WIFI
} ardrone_engine_message_t;
// Callback into the Java layer; invoked from the app_main thread with its attached JNIEnv.
typedef void (*ardroneEngineCallback)(JNIEnv* /*env*/, jobject /*obj*/, ardrone_engine_message_t /*error*/);
// Startup parameters handed to the app_main thread (heap-allocated by
// parrot_ardrone_notify_start; freed by app_main).
typedef struct {
    ardroneEngineCallback callback;   // status callback into Java
    jobject obj;                      // Java global ref, released by app_main
    char app_name[STRING_BUFFER_LENGTH];
    char user_name[STRING_BUFFER_LENGTH];
    char root_dir[STRING_BUFFER_LENGTH];
    char flight_dir[STRING_BUFFER_LENGTH];
    int flight_storing_size;
    academy_download_new_media academy_download_callback_func;
} mobile_main_param_t;
extern ardrone_info_t ardrone_info;
extern void parrot_ardrone_notify_start(JNIEnv* env, jobject obj,
ardroneEngineCallback callback,
const char *appName,
const char *userName,
const char* rootdir,
const char* flightdir,
int flight_storing_size,
academy_download_new_media academy_download_callback_func,
VIDEO_RECORDING_CAPABILITY recordingCapability);
extern void parrot_ardrone_notify_pause();
extern void parrot_ardrone_notify_resume();
extern void parrot_ardrone_notify_exit();
PROTO_THREAD_ROUTINE(app_main, data);
PROTO_THREAD_ROUTINE(video_stage_player, data);
#endif /* APP_H_ */
/*
* common.h
*
* Created on: May 4, 2011
* Author: Dmytro Baryskyy
*/
#ifndef COMMON_H_
#define COMMON_H_
#ifdef _DEBUG_
// Debug builds: forward to the Android logger at the corresponding priority.
#define LOGV(TAG, ...) ((void)__android_log_print(ANDROID_LOG_VERBOSE, TAG, __VA_ARGS__))
#define LOGD(TAG, ...) ((void)__android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__))
#define LOGI(TAG, ...) ((void)__android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__))
#define LOGW(TAG, ...) ((void)__android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__))
#define LOGE(TAG, ...) ((void)__android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__))
#else
// Release builds: every log call compiles away to an empty statement.
#define LOGD(...) ;
#define LOGV(...) ;
#define LOGI(...) ;
#define LOGW(...) ;
#define LOGE(...) ;
#endif
// Default video frame dimensions in pixels.
#define VIDEO_WIDTH 320
#define VIDEO_HEIGHT 240
// How many times a second to refresh the screen
#define kFPS 30 // Frame per second
#define kAPS 40 // Number of accelerometer() function calls by second
//extern uint16_t default_image[VIDEO_WIDTH*VIDEO_HEIGHT];
// This enum should match the constants defined in com.parrot.freeflight.drone.NavData class
typedef enum _ERROR_STATE_
{
    ERROR_STATE_NONE,
    ERROR_STATE_NAVDATA_CONNECTION,
    ERROR_STATE_START_NOT_RECEIVED,
    // Emergency conditions reported by the drone.
    ERROR_STATE_EMERGENCY_CUTOUT,
    ERROR_STATE_EMERGENCY_MOTORS,
    ERROR_STATE_EMERGENCY_CAMERA,
    ERROR_STATE_EMERGENCY_PIC_WATCHDOG,
    ERROR_STATE_EMERGENCY_PIC_VERSION,
    ERROR_STATE_EMERGENCY_ANGLE_OUT_OF_RANGE,
    ERROR_STATE_EMERGENCY_VBAT_LOW,
    ERROR_STATE_EMERGENCY_USER_EL,
    ERROR_STATE_EMERGENCY_ULTRASOUND,
    ERROR_STATE_EMERGENCY_UNKNOWN,
    // Non-fatal alert conditions.
    ERROR_STATE_ALERT_CAMERA,
    ERROR_STATE_ALERT_VBAT_LOW,
    ERROR_STATE_ALERT_ULTRASOUND,
    ERROR_STATE_ALERT_VISION,
    ERROR_STATE_MAX
} ERROR_STATE;
// This enum should match the constants defined in com.parrot.freeflight.drone.NavData class
typedef enum {
    NO_ALERT = 0,
    VIDEO_CONNECTION_ALERT,
    BATTERY_LOW_ALERT,
    ULTRASOUND_ALERT,
    VISION_ALERT,
    START_NOT_RECEIVED,
    CONTROL_LINK_NOT_AVAILABLE,
    WIFI_NOT_AVAILABLE
} ARDRONE_ALERT_STATE;
// This enum should match the constants defined in com.parrot.freeflight.service.DroneControlService
typedef enum {
    CONTROL_SET_YAW = 0,
    CONTROL_SET_GAZ,
    CONTROL_SET_PITCH,
    CONTROL_SET_ROLL
} CONTROL_COMMAND;
// Identifiers for the settings-UI controls driven from native code.
typedef enum {
    ID_PAIRING_BTN = 0,
    ID_NETWORK_NAME_EDIT,
    ID_ALTITUDE_LIMITED_BTN,
    ID_ADAPTIVE_VIDEO_BTN,
    ID_OUTDOOR_HULL_BTN,
    ID_OUTDOOR_FLIGHT_BTN
} UI_CONTROL_ID;
// This enum should match DroneProxy.EVideoRecorderCapability enum.
typedef enum {
    VIDEO_RECORDING_NOT_SUPPORTED,
    VIDEO_RECORDING_360P,
    VIDEO_RECORDING_720p
} VIDEO_RECORDING_CAPABILITY;
// Standard library
#include <stdlib.h>
#include <stdio.h>
#include <sys/time.h>
#include <time.h>
// JNI
#include <jni.h>
// Android
#include <android/log.h>
// VP_SDK
#include <VP_Os/vp_os_types.h>
//#define _DEBUG_
//#define _DEBUG_MODE_
#include <ardrone_api.h>
#include <control_states.h>
#include <ardrone_tool/ardrone_version.h>
#include <ardrone_tool/ardrone_tool.h>
#include <ardrone_tool/ardrone_tool_configuration.h>
#include <ardrone_tool/Academy/academy.h>
#include <ardrone_tool/Academy/academy_download.h>
#include <ardrone_tool/Control/ardrone_control.h>
#include <ardrone_tool/Control/ardrone_control_ack.h>
#include <ardrone_tool/Control/ardrone_control_configuration.h>
#include <ardrone_tool/Navdata/ardrone_navdata_client.h>
#include <ardrone_tool/UI/ardrone_input.h>
#include <ardrone_tool/Com/config_com.h>
#include <ardrone_tool/Video/video_com_stage.h>
//#include <ardrone_tool/Video/vlib_stage_decode.h>
#include <ardrone_tool/Video/video_stage.h>
#include <ardrone_tool/Video/video_stage_latency_estimation.h>
#include <ardrone_tool/Video/video_recorder_pipeline.h>
#include <ardrone_tool/Video/video_navdata_handler.h>
#include <utils/ardrone_time.h>
#include <utils/ardrone_date.h>
#include <VP_Os/vp_os.h>
#include <VP_Os/vp_os_print.h>
#include <VP_Os/vp_os_types.h>
#include <VP_Os/vp_os_signal.h>
#include <VP_Os/vp_os_malloc.h>
#include <VP_Os/vp_os_delay.h>
#include <VP_Api/vp_api.h>
#include <VP_Api/vp_api_error.h>
#include <VP_Api/vp_api_stage.h>
#include <VP_Api/vp_api_picture.h>
#include <VP_Api/vp_api_thread_helper.h>
#include <VLIB/Stages/vlib_stage_decode.h>
#include <VLIB/video_codec.h>
#include <iniparser3.0b/src/iniparser.h>
//#include <VLIB/Stages/vlib_stage_decode.h>
#include <ardrone_api.h>
#include "ARDroneTypes.h"
#include "ARDroneGeneratedTypes.h"
#include "ARDroneGeneratedCommandIn.h"
#include "hardware_capabilites.h"
#include "ControlData.h"
#include "Video/opengl_shader.h"
#include "Video/opengl_stage.h"
#include "Stubs/gl_bg_video_sprite_stub.h"
// This variable holds reference to the java virtual machine
extern JavaVM* g_vm;
#endif /* COMMON_H_ */
//
// hardware.c
// ARDroneEngine
//
// Created by Nicolas BRULEZ on 20/12/11.
// Copyright (c) 2011 Parrot. All rights reserved.
//
#include "common.h"
#include "hardware_capabilites.h"
#include <sys/utsname.h>
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
// Device classes recognized by _parseDeviceString().  The names are
// iOS-centric; this detection table appears ported from the iPhone app --
// on Android, uname() machine strings will typically hit _DEV_OTHER.
typedef enum {
    _DEV_NOT_INIT = 0,
    _DEV_IPHONE,
    _DEV_IPAD,
    _DEV_IPOD,
    _DEV_SIMULATOR,
    _DEV_OTHER,
} _hw_deviceType;
// Parsed device identity: class plus "major,minor" hardware revision.
typedef struct {
    _hw_deviceType device;
    int major;
    int minor;
} _device_hw_t;
// Infos for each vCaps :
const vCapsInfo_t vCapsInfo [VIDEO_CAPABILITIES_NUM] = {
    { 15, 500, H264_360P_CODEC }, // Pre-A4 devices : 15 fps, 0.5Mbps 360p
    { 25, 1500, H264_360P_CODEC }, // A4 devices : 25 fps, 1.5Mbps 360p
    { 30, 4000, H264_360P_CODEC }, // A5 devices : 30 fps, 4Mbps 360p
    { 30, 4000, H264_720P_CODEC } // Future devices : 30 fps, 4Mbps 720p
};
// Lazily resolve _currentDevice; on failure, return FAILRET from the caller.
#define HWCAPS_INIT_WITH_FAILURE_RETURN(FAILRET) \
do \
{ \
if (0 != _getCurrentDevice()) \
{ \
return FAILRET; \
} \
} while (0)
// Same as above for void functions.
#define HWCAPS_INIT() \
do \
{ \
if (0 != _getCurrentDevice()) \
{ \
return; \
} \
} while (0)
// Cached device identity, resolved once by _getCurrentDevice().
static _device_hw_t _currentDevice = {_DEV_NOT_INIT, 0, 0};
// Environment info supplied by the Java layer via setEnvironmentInfo().
static int _androidVersion;
static char _gpuVendor[256];
/*
 * Classify a uname() machine string (e.g. "iPhone3,1") into a device class
 * and, when present, parse the trailing "major,minor" hardware revision.
 * Results are stored in the module-level _currentDevice.
 */
void _parseDeviceString (char *str)
{
    _hw_deviceType kind;
    char *versionPart = NULL;
    int major = 0;
    int minor = 0;

    if (0 == strncmp ("iPhone", str, 6))
    {
        kind = _DEV_IPHONE;
        versionPart = str + 6;
    }
    else if (0 == strncmp ("iPad", str, 4))
    {
        kind = _DEV_IPAD;
        versionPart = str + 4;
    }
    else if (0 == strncmp ("iPod", str, 4))
    {
        kind = _DEV_IPOD;
        versionPart = str + 4;
    }
    else if (0 == strncmp ("i386", str, 4))
    {
        kind = _DEV_SIMULATOR;
    }
    else
    {
        kind = _DEV_OTHER;
    }

    _currentDevice.device = kind;
    /* Only record the revision when both numbers parsed successfully. */
    if (versionPart != NULL && 2 == sscanf (versionPart, "%d,%d", &major, &minor))
    {
        _currentDevice.major = major;
        _currentDevice.minor = minor;
    }
}
/*
 * Record environment details supplied by the Java layer: the Android API
 * level and the GPU vendor string (truncated to fit _gpuVendor).
 *
 * BUGFIX: strncpy does not NUL-terminate when the source is >= the buffer
 * size; copy at most sizeof-1 bytes and force a trailing NUL.
 */
void setEnvironmentInfo(int androidVersion, char* gpuVendor)
{
    _androidVersion = androidVersion;
    strncpy(_gpuVendor, gpuVendor, sizeof(_gpuVendor) - 1);
    _gpuVendor[sizeof(_gpuVendor) - 1] = '\0';
}
/*
 * Lazily resolve the current device identity via uname().  Returns 0 when
 * the identity is (or already was) available, otherwise the non-zero
 * uname() error code.
 */
int _getCurrentDevice (void)
{
    struct utsname platform;

    /* Already resolved on a previous call. */
    if (_currentDevice.device != _DEV_NOT_INIT)
    {
        return 0;
    }

    int status = uname(&platform);
    if (0 == status)
    {
        LOGD("HARDWARE", "Machine: %s, Sysname: %s, Version: %s", platform.machine, platform.sysname, platform.version);
        _parseDeviceString(platform.machine);
    }
    return status;
}
/*
 * Map the detected device class/revision to a video capability tier.
 * Falls back to VIDEO_CAPABILITIES_MIN when the device cannot be resolved.
 */
videoCapabilities getDeviceVideoCapabilites (void)
{
    videoCapabilities caps;
    HWCAPS_INIT_WITH_FAILURE_RETURN (VIDEO_CAPABILITIES_MIN);
    switch (_currentDevice.device) {
        case _DEV_IPHONE:
            if (_currentDevice.major < 3)
            {
                caps = VIDEO_CAPABILITIES_MIN;
            }
            else if (_currentDevice.major == 3)
            {
                caps = VIDEO_CAPABILITIES_IP4;
            }
            else
            {
                caps = VIDEO_CAPABILITIES_360;
            }
            break;
        case _DEV_IPOD:
            caps = (_currentDevice.major < 4) ? VIDEO_CAPABILITIES_MIN : VIDEO_CAPABILITIES_IP4;
            break;
        case _DEV_IPAD:
            caps = (_currentDevice.major < 2) ? VIDEO_CAPABILITIES_IP4 : VIDEO_CAPABILITIES_360;
            break;
        default:
            caps = VIDEO_CAPABILITIES_360;
            break;
    }
    return caps;
}
/* Debug helper: log the resolved device class and hardware revision.
 * Resolves the device first; silently returns if resolution fails. */
void printDeviceInfos (void)
{
    HWCAPS_INIT();
    LOGD ("HARDWARE", "Device number : %d | Major : %d | Minor : %d\n", _currentDevice.device, _currentDevice.major, _currentDevice.minor);
}
#ifndef _HARDWARE_H_
#define _HARDWARE_H_
#include <VLIB/video_codec.h>
// Video capability tiers, ordered from least to most capable; used to index
// the vCapsInfo table.
typedef enum
{
    VIDEO_CAPABILITIES_MIN = 0,
    VIDEO_CAPABILITIES_IP4,
    VIDEO_CAPABILITIES_360,
    VIDEO_CAPABILITIES_720,
    VIDEO_CAPABILITIES_NUM,
} videoCapabilities;
// Per-tier stream settings: frame rate, bitrate (kbps per the table
// comments in hardware.c) and default codec.
typedef struct _vcaps {
    int supportedFps;
    int supportedBitrate;
    codec_type_t defaultCodec;
} vCapsInfo_t;
extern const vCapsInfo_t vCapsInfo [VIDEO_CAPABILITIES_NUM];
void setEnvironmentInfo(int androidVersion, char* gpuVendor);
videoCapabilities getDeviceVideoCapabilites (void);
void printDeviceInfos (void);
#endif
/**
* \brief File stage declaration
* \author D'HAEYER Frédéric <frederic.dhaeyer@parrot.com>
* \date 14/06/2011
*/
///////////////////////////////////////////////
// INCLUDES
#include <VP_Api/vp_api_error.h>
#include <VP_Os/vp_os_assert.h>
#include <VP_Os/vp_os_print.h>
#include <VP_Os/vp_os_delay.h>
#include <VP_Os/vp_os_malloc.h>
#include <video_stage_io_file.h>
// VP_API stage vtable for the file-input stage: no message handler;
// open/transform/close implemented below.
const vp_api_stage_funcs_t video_stage_io_file_funcs =
{
    (vp_api_stage_handle_msg_t) NULL,
    (vp_api_stage_open_t) video_stage_io_file_stage_open,
    (vp_api_stage_transform_t) video_stage_io_file_stage_transform,
    (vp_api_stage_close_t) video_stage_io_file_stage_close
};
/*
 * Open the input file stage: reset the running max buffer size and open
 * cfg->filename for binary reading.  Returns C_FAIL when the file cannot
 * be opened.
 */
C_RESULT
video_stage_io_file_stage_open(video_stage_io_file_config_t *cfg)
{
    cfg->max_size = 0;
    cfg->f = fopen(cfg->filename, "rb");
    if (NULL == cfg->f)
    {
        PRINT("Missing input file\n");
        return C_FAIL;
    }
    return C_OK;
}
/*
 * Read one length-prefixed frame from the input file into the stage's
 * (lazily grown) output buffer.  The file format is a sequence of
 * [int32 size][size bytes] records.  Marks the stream ENDED on EOF, a
 * zero/negative size, or a short payload read.
 *
 * BUGFIX: the payload-read check was `if(!fread(...) == out->size)`.
 * `!` binds tighter than `==`, so it compared (fread==0) against out->size
 * and short reads were never detected.  The intended comparison is
 * `fread(...) != out->size`.
 */
C_RESULT
video_stage_io_file_stage_transform(video_stage_io_file_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out)
{
    vp_os_mutex_lock(&out->lock);
    if( out->status == VP_API_STATUS_INIT )
    {
        // Single lazily-allocated output buffer, shared with cfg->buffers.
        out->numBuffers = 1;
        out->indexBuffer = 0;
        out->buffers = (uint8_t **) vp_os_malloc (sizeof(uint8_t *));
        out->buffers[out->indexBuffer] = (uint8_t *)NULL;
        out->status = VP_API_STATUS_PROCESSING;
        cfg->buffers = (int8_t **)out->buffers;
    }
    if(out->status == VP_API_STATUS_PROCESSING)
    {
        out->size = 0;
        if(!feof(cfg->f))
        {
            if((fread(&out->size, sizeof(int32_t), 1, cfg->f) > 0) && (out->size > 0))
            {
                // Grow the buffer to the largest frame seen so far.
                if(out->size > cfg->max_size)
                {
                    cfg->max_size = out->size;
                    out->buffers[out->indexBuffer] = (uint8_t *)vp_os_realloc(out->buffers[out->indexBuffer], sizeof(uint8_t) * cfg->max_size);
                }
                if(fread(out->buffers[out->indexBuffer], sizeof(uint8_t), out->size, cfg->f) != (size_t)out->size)
                    out->status = VP_API_STATUS_ENDED;
            }
            else
            {
                // Bad/absent size prefix: signal end of stream.
                out->size = 1;
                out->status = VP_API_STATUS_ENDED;
            }
        }
        else
        {
            out->size = 1;
            out->status = VP_API_STATUS_ENDED;
        }
    }
    vp_os_mutex_unlock(&out->lock);
    return C_OK;
}
/*
 * Close the input file stage: close the file and free the frame buffer
 * allocated in the transform step.
 *
 * BUGFIX: guard against NULL -- if open failed (cfg->f == NULL) or the
 * transform never ran (cfg->buffers == NULL), the original code called
 * fclose(NULL) and dereferenced a NULL buffer list.
 */
C_RESULT
video_stage_io_file_stage_close(video_stage_io_file_config_t *cfg)
{
    if (cfg->f != NULL)
    {
        fclose(cfg->f);
        cfg->f = NULL;
    }
    if (cfg->buffers != NULL)
    {
        if(cfg->buffers[0] != NULL)
        {
            vp_os_free(cfg->buffers[0]);
            cfg->buffers[0] = NULL;
        }
        vp_os_free(cfg->buffers);
        cfg->buffers = NULL;
    }
    return C_OK;
}
/**
* \brief File stage declaration
* \author D'HAEYER Frédéric <frederic.dhaeyer@parrot.com>
* \date 14/06/2011
*/
#ifndef _VIDEO_STAGE_IO_FILE_H_
#define _VIDEO_STAGE_IO_FILE_H_
#include <VP_Api/vp_api.h>
#include <stdio.h>
// Configuration/state for the file-input stage.
typedef struct _video_stage_io_file_config_t_
{
    // Public
    char *filename;   // path of the length-prefixed frame file to read
    // Private
    FILE *f;          // open file handle (NULL until stage_open succeeds)
    int8_t **buffers; // aliases the stage's output buffer list
    int32_t max_size; // largest frame size seen; current buffer capacity
} video_stage_io_file_config_t;
///////////////////////////////////////////////
// FUNCTIONS
/**
* @fn Open the input file stage
* @param video_stage_io_file_config_t *cfg
* @return VP_SUCCESS
*/
C_RESULT
video_stage_io_file_stage_open(video_stage_io_file_config_t *cfg);
/**
* @fn Transform the input file stage
* @param video_stage_io_file_config_t *cfg
* @param vp_api_io_data_t *in
* @param vp_api_io_data_t *out
* @return VP_SUCCESS
*/
C_RESULT
video_stage_io_file_stage_transform(video_stage_io_file_config_t *cfg, vp_api_io_data_t *in, vp_api_io_data_t *out);
/**
* @fn Close the input file stage
* @param video_stage_io_file_config_t *cfg
* @return VP_SUCCESS
*/
C_RESULT
video_stage_io_file_stage_close(video_stage_io_file_config_t *cfg);
extern const vp_api_stage_funcs_t video_stage_io_file_funcs;
#endif // ! _VIDEO_STAGE_IO_FILE_H_
#CFLAGS=-Wall -O0 -g -fPIC
CFLAGS := -g
include $(all-subdir-makefiles)
APP_PROJECT_PATH := $(LOCAL_PATH)
APP_ABI := armeabi armeabi-v7a
\ No newline at end of file
# Android.mk: registers the prebuilt FFmpeg shared libraries so that other
# NDK modules can link against them. Each section clears the module
# variables, names the module, points at the per-ABI .so file, and
# registers it as a prebuilt shared library.
LOCAL_PATH := $(call my-dir)
# AVCODEC
include $(CLEAR_VARS)
LOCAL_MODULE := AVCODEC-prebuilt
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libavcodec.so
include $(PREBUILT_SHARED_LIBRARY)
#AVUTIL
include $(CLEAR_VARS)
LOCAL_MODULE := AVUTIL-prebuilt
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libavutil.so
include $(PREBUILT_SHARED_LIBRARY)
#SWSCALE
include $(CLEAR_VARS)
LOCAL_MODULE := SWSCALE-prebuilt
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libswscale.so
include $(PREBUILT_SHARED_LIBRARY)
#AVFILTER
include $(CLEAR_VARS)
LOCAL_MODULE := AVFILTER-prebuilt
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libavfilter.so
include $(PREBUILT_SHARED_LIBRARY)
#AVFORMAT
include $(CLEAR_VARS)
LOCAL_MODULE := AVFORMAT-prebuilt
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libavformat.so
include $(PREBUILT_SHARED_LIBRARY)
#AVDEVICE
include $(CLEAR_VARS)
LOCAL_MODULE := AVDEVICE-prebuilt
LOCAL_SRC_FILES := $(TARGET_ARCH_ABI)/libavdevice.so
include $(PREBUILT_SHARED_LIBRARY)
This source diff could not be displayed because it is too large. You can view the blob instead.
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_AVFFT_H
#define AVCODEC_AVFFT_H
/**
 * @file
 * Public FFT / MDCT / RDFT / DCT API of libavcodec.
 */
typedef float FFTSample;
typedef struct FFTComplex {
    FFTSample re, im;
} FFTComplex;
typedef struct FFTContext FFTContext;
/**
 * Set up a complex FFT.
 * @param nbits log2 of the length of the input array
 * @param inverse if 0 perform the forward transform, if 1 perform the inverse
 */
FFTContext *av_fft_init(int nbits, int inverse);
/**
 * Do the permutation needed BEFORE calling ff_fft_calc().
 */
void av_fft_permute(FFTContext *s, FFTComplex *z);
/**
 * Do a complex FFT with the parameters defined in av_fft_init(). The
 * input data must be permuted before. No 1.0/sqrt(n) normalization is done.
 */
void av_fft_calc(FFTContext *s, FFTComplex *z);
/** Free a context allocated by av_fft_init(). */
void av_fft_end(FFTContext *s);
/**
 * Set up an MDCT.
 * @param nbits log2 of the transform size
 * @param inverse if 0 perform the forward transform, if 1 perform the inverse
 * @param scale scale factor applied to the output
 */
FFTContext *av_mdct_init(int nbits, int inverse, double scale);
/** Compute a full inverse MDCT of input into output. */
void av_imdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input);
/** Compute only the first half of an inverse MDCT of input into output. */
void av_imdct_half(FFTContext *s, FFTSample *output, const FFTSample *input);
/** Compute a forward MDCT of input into output. */
void av_mdct_calc(FFTContext *s, FFTSample *output, const FFTSample *input);
/** Free a context allocated by av_mdct_init(). */
void av_mdct_end(FFTContext *s);
/* Real Discrete Fourier Transform */
enum RDFTransformType {
    DFT_R2C,  ///< real input to complex output
    IDFT_C2R, ///< inverse, complex input to real output
    IDFT_R2C, ///< inverse, real input to complex output
    DFT_C2R,  ///< complex input to real output
};
typedef struct RDFTContext RDFTContext;
/**
 * Set up a real FFT.
 * @param nbits log2 of the length of the input array
 * @param trans the type of transform
 */
RDFTContext *av_rdft_init(int nbits, enum RDFTransformType trans);
/** Compute the real FFT/IFFT configured in av_rdft_init() on data (single in/out buffer). */
void av_rdft_calc(RDFTContext *s, FFTSample *data);
/** Free a context allocated by av_rdft_init(). */
void av_rdft_end(RDFTContext *s);
/* Discrete Cosine Transform */
typedef struct DCTContext DCTContext;
enum DCTTransformType {
    DCT_II = 0,
    DCT_III,
    DCT_I,
    DST_I,
};
/**
 * Set up DCT.
 * @param nbits size of the input array:
 * (1 << nbits) for DCT-II, DCT-III and DST-I
 * (1 << nbits) + 1 for DCT-I
 *
 * @note the first element of the input of DST-I is ignored
 */
DCTContext *av_dct_init(int nbits, enum DCTTransformType type);
/** Compute the DCT configured in av_dct_init() on data (single in/out buffer). */
void av_dct_calc(DCTContext *s, FFTSample *data);
/** Free a context allocated by av_dct_init(). */
void av_dct_end (DCTContext *s);
#endif /* AVCODEC_AVFFT_H */
/*
* DXVA2 HW acceleration
*
* copyright (c) 2009 Laurent Aimar
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_DXVA_H
#define AVCODEC_DXVA_H
#include <stdint.h>
#include <dxva2api.h>
/**
 * This structure is used to provide the necessary configurations and data
 * to the DXVA2 FFmpeg HWAccel implementation.
 *
 * The application must make it available as AVCodecContext.hwaccel_context.
 */
struct dxva_context {
    /**
     * DXVA2 decoder object
     */
    IDirectXVideoDecoder *decoder;
    /**
     * DXVA2 configuration used to create the decoder
     */
    const DXVA2_ConfigPictureDecode *cfg;
    /**
     * The number of surfaces in the surface array
     */
    unsigned surface_count;
    /**
     * The array of Direct3D surfaces used to create the decoder
     */
    LPDIRECT3DSURFACE9 *surface;
    /**
     * A bit field configuring the workarounds needed for using the decoder
     */
    uint64_t workaround;
    /**
     * Private to the FFmpeg AVHWAccel implementation
     */
    unsigned report_id;
};
#endif /* AVCODEC_DXVA_H */
/**
* @file
* This header is provided for compatibility only and will be removed
* on next major bump
*/
#ifndef AVCODEC_OPT_H
#define AVCODEC_OPT_H
#include "libavcodec/version.h"
/* Compatibility shim: while FF_API_OPT_H is enabled this header forwards to
 * the AVOption API in libavutil; once it is disabled this header is empty. */
#if FF_API_OPT_H
#include "libavutil/opt.h"
#endif
#endif /* AVCODEC_OPT_H */
/*
* Video Acceleration API (shared data between FFmpeg and the video player)
* HW decode acceleration for MPEG-2, MPEG-4, H.264 and VC-1
*
* Copyright (C) 2008-2009 Splitted-Desktop Systems
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VAAPI_H
#define AVCODEC_VAAPI_H
#include <stdint.h>
/**
* \defgroup VAAPI_Decoding VA API Decoding
* \ingroup Decoder
* @{
*/
/**
 * This structure is used to share data between the FFmpeg library and
 * the client video application.
 * This shall be zero-allocated and available as
 * AVCodecContext.hwaccel_context. All user members can be set once
 * during initialization or through each AVCodecContext.get_buffer()
 * function call. In any case, they must be valid prior to calling
 * decoding functions.
 */
struct vaapi_context {
    /**
     * Window system dependent data
     *
     * NOTE(review): for VA-API this is presumably the VADisplay handle —
     * confirm against the libva documentation.
     *
     * - encoding: unused
     * - decoding: Set by user
     */
    void *display;
    /**
     * Configuration ID
     *
     * - encoding: unused
     * - decoding: Set by user
     */
    uint32_t config_id;
    /**
     * Context ID (video decode pipeline)
     *
     * - encoding: unused
     * - decoding: Set by user
     */
    uint32_t context_id;
    /**
     * VAPictureParameterBuffer ID
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    uint32_t pic_param_buf_id;
    /**
     * VAIQMatrixBuffer ID
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    uint32_t iq_matrix_buf_id;
    /**
     * VABitPlaneBuffer ID (for VC-1 decoding)
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    uint32_t bitplane_buf_id;
    /**
     * Slice parameter/data buffer IDs
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    uint32_t *slice_buf_ids;
    /**
     * Number of effective slice buffer IDs to send to the HW
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    unsigned int n_slice_buf_ids;
    /**
     * Size of pre-allocated slice_buf_ids
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    unsigned int slice_buf_ids_alloc;
    /**
     * Pointer to VASliceParameterBuffers
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    void *slice_params;
    /**
     * Size of a VASliceParameterBuffer element
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    unsigned int slice_param_size;
    /**
     * Size of pre-allocated slice_params
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    unsigned int slice_params_alloc;
    /**
     * Number of slices currently filled in
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    unsigned int slice_count;
    /**
     * Pointer to slice data buffer base
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    const uint8_t *slice_data;
    /**
     * Current size of slice data
     *
     * - encoding: unused
     * - decoding: Set by libavcodec
     */
    uint32_t slice_data_size;
};
/* @} */
#endif /* AVCODEC_VAAPI_H */
/*
* The Video Decode and Presentation API for UNIX (VDPAU) is used for
* hardware-accelerated decoding of MPEG-1/2, H.264 and VC-1.
*
* Copyright (C) 2008 NVIDIA
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VDPAU_H
#define AVCODEC_VDPAU_H
/**
* \defgroup Decoder VDPAU Decoder and Renderer
*
* VDPAU hardware acceleration has two modules
* - VDPAU decoding
* - VDPAU presentation
*
* The VDPAU decoding module parses all headers using FFmpeg
* parsing mechanisms and uses VDPAU for the actual decoding.
*
* As per the current implementation, the actual decoding
* and rendering (API calls) are done as part of the VDPAU
* presentation (vo_vdpau.c) module.
*
* \defgroup VDPAU_Decoding VDPAU Decoding
* \ingroup Decoder
* @{
*/
#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>
/** \brief The videoSurface is used for rendering. */
#define FF_VDPAU_STATE_USED_FOR_RENDER 1
/**
 * \brief The videoSurface is needed for reference/prediction.
 * The codec manipulates this.
 */
#define FF_VDPAU_STATE_USED_FOR_REFERENCE 2
/**
 * \brief This structure is used as a callback between the FFmpeg
 * decoder (vd_) and presentation (vo_) module.
 * This is used for defining a video frame containing surface,
 * picture parameter, bitstream information etc which are passed
 * between the FFmpeg decoder and its clients.
 */
struct vdpau_render_state {
    VdpVideoSurface surface; ///< Used as rendered surface, never changed.
    int state; ///< Holds FF_VDPAU_STATE_* values.
    /** Describe size/location of the compressed video data.
        Set to 0 when freeing bitstream_buffers. */
    int bitstream_buffers_allocated;
    int bitstream_buffers_used;
    /** The user is responsible for freeing this buffer using av_freep(). */
    VdpBitstreamBuffer *bitstream_buffers;
    /** picture parameter information for all supported codecs */
    /* NOTE(review): newer VDPAU headers define their own VdpPictureInfo type;
       this union tag may clash when upgrading vdpau.h — verify on update. */
    union VdpPictureInfo {
        VdpPictureInfoH264 h264;
        VdpPictureInfoMPEG1Or2 mpeg;
        VdpPictureInfoVC1 vc1;
        VdpPictureInfoMPEG4Part2 mpeg4;
    } info;
};
/* @}*/
#endif /* AVCODEC_VDPAU_H */
/*
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_VERSION_H
#define AVCODEC_VERSION_H
/**
 * @file
 * libavcodec version macros and temporary API-deprecation guards.
 */
#define LIBAVCODEC_VERSION_MAJOR 53
#define LIBAVCODEC_VERSION_MINOR 7
#define LIBAVCODEC_VERSION_MICRO 0
#define LIBAVCODEC_VERSION_INT AV_VERSION_INT(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
LIBAVCODEC_VERSION_MICRO)
#define LIBAVCODEC_VERSION AV_VERSION(LIBAVCODEC_VERSION_MAJOR, \
LIBAVCODEC_VERSION_MINOR, \
LIBAVCODEC_VERSION_MICRO)
#define LIBAVCODEC_BUILD LIBAVCODEC_VERSION_INT
#define LIBAVCODEC_IDENT "Lavc" AV_STRINGIFY(LIBAVCODEC_VERSION)
/**
 * Those FF_API_* defines are not part of public API.
 * They may change, break or disappear at any time.
 * Each one keeps a deprecated feature alive only while the major
 * version is below the value tested in its definition.
 */
#ifndef FF_API_PALETTE_CONTROL
#define FF_API_PALETTE_CONTROL (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_OLD_SAMPLE_FMT
#define FF_API_OLD_SAMPLE_FMT (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_OLD_AUDIOCONVERT
#define FF_API_OLD_AUDIOCONVERT (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_ANTIALIAS_ALGO
#define FF_API_ANTIALIAS_ALGO (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_REQUEST_CHANNELS
#define FF_API_REQUEST_CHANNELS (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_OPT_H
#define FF_API_OPT_H (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_THREAD_INIT
#define FF_API_THREAD_INIT (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_OLD_FF_PICT_TYPES
#define FF_API_OLD_FF_PICT_TYPES (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_FLAC_GLOBAL_OPTS
#define FF_API_FLAC_GLOBAL_OPTS (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_GET_PIX_FMT_NAME
#define FF_API_GET_PIX_FMT_NAME (LIBAVCODEC_VERSION_MAJOR < 54)
#endif
#endif /* AVCODEC_VERSION_H */
/*
* Copyright (C) 2003 Ivan Kalvachev
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_XVMC_H
#define AVCODEC_XVMC_H
#include <X11/extensions/XvMC.h>
#include "avcodec.h"
#define AV_XVMC_ID 0x1DC711C0 /**< special value to ensure that regular pixel routines haven't corrupted the struct
the number is 1337 speak for the letters IDCT MCo (motion compensation) */
struct xvmc_pix_fmt {
    /** The field contains the special constant value AV_XVMC_ID.
        It is used as a test that the application correctly uses the API,
        and that there is no corruption caused by pixel routines.
        - application - set during initialization
        - libavcodec - unchanged
    */
    int xvmc_id;
    /** Pointer to the block array allocated by XvMCCreateBlocks().
        The array has to be freed by XvMCDestroyBlocks().
        Each group of 64 values represents one data block of differential
        pixel information (in MoCo mode) or coefficients for IDCT.
        - application - set the pointer during initialization
        - libavcodec - fills coefficients/pixel data into the array
    */
    short* data_blocks;
    /** Pointer to the macroblock description array allocated by
        XvMCCreateMacroBlocks() and freed by XvMCDestroyMacroBlocks().
        - application - set the pointer during initialization
        - libavcodec - fills description data into the array
    */
    XvMCMacroBlock* mv_blocks;
    /** Number of macroblock descriptions that can be stored in the mv_blocks
        array.
        - application - set during initialization
        - libavcodec - unchanged
    */
    int allocated_mv_blocks;
    /** Number of blocks that can be stored at once in the data_blocks array.
        - application - set during initialization
        - libavcodec - unchanged
    */
    int allocated_data_blocks;
    /** Indicate that the hardware would interpret data_blocks as IDCT
        coefficients and perform IDCT on them.
        - application - set during initialization
        - libavcodec - unchanged
    */
    int idct;
    /** In MoCo mode it indicates that intra macroblocks are assumed to be in
        unsigned format; same as the XVMC_INTRA_UNSIGNED flag.
        - application - set during initialization
        - libavcodec - unchanged
    */
    int unsigned_intra;
    /** Pointer to the surface allocated by XvMCCreateSurface().
        It has to be freed by XvMCDestroySurface() on application exit.
        It identifies the frame and its state on the video hardware.
        - application - set during initialization
        - libavcodec - unchanged
    */
    XvMCSurface* p_surface;
    /** Set by the decoder before calling ff_draw_horiz_band(),
        needed by the XvMCRenderSurface function. */
    //@{
    /** Pointer to the surface used as past reference
        - application - unchanged
        - libavcodec - set
    */
    XvMCSurface* p_past_surface;
    /** Pointer to the surface used as future reference
        - application - unchanged
        - libavcodec - set
    */
    XvMCSurface* p_future_surface;
    /** top/bottom field or frame
        - application - unchanged
        - libavcodec - set
    */
    unsigned int picture_structure;
    /** XVMC_SECOND_FIELD - 1st or 2nd field in the sequence
        - application - unchanged
        - libavcodec - set
    */
    unsigned int flags;
    //@}
    /** Number of macroblock descriptions in the mv_blocks array
        that have already been passed to the hardware.
        - application - zeroes it on get_buffer().
        A successful ff_draw_horiz_band() may increment it
        with filled_mv_blocks_num or zero both.
        - libavcodec - unchanged
    */
    int start_mv_blocks_num;
    /** Number of new macroblock descriptions in the mv_blocks array (after
        start_mv_blocks_num) that are filled by libavcodec and have to be
        passed to the hardware.
        - application - zeroes it on get_buffer() or after successful
        ff_draw_horiz_band().
        - libavcodec - increment with one of each stored MB
    */
    int filled_mv_blocks_num;
    /** Number of the next free data block; one data block consists of
        64 short values in the data_blocks array.
        All blocks before this one have already been claimed by placing their
        position into the corresponding block description structure field,
        that are part of the mv_blocks array.
        - application - zeroes it on get_buffer().
        A successful ff_draw_horiz_band() may zero it together
        with start_mv_blocks_num.
        - libavcodec - each decoded macroblock increases it by the number
        of coded blocks it contains.
    */
    int next_free_data_block_num;
};
#endif /* AVCODEC_XVMC_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVDEVICE_AVDEVICE_H
#define AVDEVICE_AVDEVICE_H
#include "libavutil/avutil.h"
#include "libavformat/avformat.h"
/* Version macros for libavdevice. */
#define LIBAVDEVICE_VERSION_MAJOR 53
#define LIBAVDEVICE_VERSION_MINOR 1
#define LIBAVDEVICE_VERSION_MICRO 1
#define LIBAVDEVICE_VERSION_INT AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, \
LIBAVDEVICE_VERSION_MINOR, \
LIBAVDEVICE_VERSION_MICRO)
#define LIBAVDEVICE_VERSION AV_VERSION(LIBAVDEVICE_VERSION_MAJOR, \
LIBAVDEVICE_VERSION_MINOR, \
LIBAVDEVICE_VERSION_MICRO)
#define LIBAVDEVICE_BUILD LIBAVDEVICE_VERSION_INT
/* Deprecation guard: the feature stays enabled only while major < 54. */
#ifndef FF_API_V4L
#define FF_API_V4L (LIBAVDEVICE_VERSION_MAJOR < 54)
#endif
/**
 * Return the LIBAVDEVICE_VERSION_INT constant.
 */
unsigned avdevice_version(void);
/**
 * Return the libavdevice build-time configuration.
 */
const char *avdevice_configuration(void);
/**
 * Return the libavdevice license.
 */
const char *avdevice_license(void);
/**
 * Initialize libavdevice and register all the input and output devices.
 * @warning This function is not thread safe.
 */
void avdevice_register_all(void);
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_AVCODEC_H
#define AVFILTER_AVCODEC_H
/**
 * @file
 * libavcodec/libavfilter gluing utilities
 *
 * This should be included in an application ONLY if the installed
 * libavfilter has been compiled with libavcodec support, otherwise
 * symbols defined below will not be available.
 */
#include "libavcodec/avcodec.h" // AVFrame
#include "avfilter.h"
#include "vsrc_buffer.h"
/**
 * Copy the frame properties of src to dst, without copying the actual
 * image data.
 *
 * @param dst buffer reference whose properties are overwritten
 * @param src frame whose properties are read
 */
void avfilter_copy_frame_props(AVFilterBufferRef *dst, const AVFrame *src);
/**
 * Create and return a picref reference from the data and properties
 * contained in frame.
 *
 * @param frame frame to wrap in a new buffer reference
 * @param perms permissions to assign to the new buffer reference
 */
AVFilterBufferRef *avfilter_get_video_buffer_ref_from_frame(const AVFrame *frame, int perms);
/**
 * Fill an AVFrame with the information stored in picref.
 *
 * @param frame an already allocated AVFrame
 * @param picref a video buffer reference
 * @return 0 in case of success, a negative AVERROR code in case of
 * failure
 */
int avfilter_fill_frame_from_video_buffer_ref(AVFrame *frame,
const AVFilterBufferRef *picref);
/**
 * Add frame data to buffer_src.
 *
 * @param buffer_src pointer to a buffer source context
 * @param frame frame whose data is added to the buffer source
 * @param flags a combination of AV_VSRC_BUF_FLAG_* flags
 * @return >= 0 in case of success, a negative AVERROR code in case of
 * failure
 */
int av_vsrc_buffer_add_frame(AVFilterContext *buffer_src,
const AVFrame *frame, int flags);
#endif /* AVFILTER_AVCODEC_H */
/*
* filter layer
* Copyright (c) 2007 Bobby Bingham
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_AVFILTER_H
#define AVFILTER_AVFILTER_H
#include "libavutil/avutil.h"
#include "libavutil/samplefmt.h"
/* Version macros for libavfilter. */
#define LIBAVFILTER_VERSION_MAJOR 2
#define LIBAVFILTER_VERSION_MINOR 23
#define LIBAVFILTER_VERSION_MICRO 0
#define LIBAVFILTER_VERSION_INT AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, \
LIBAVFILTER_VERSION_MINOR, \
LIBAVFILTER_VERSION_MICRO)
#define LIBAVFILTER_VERSION AV_VERSION(LIBAVFILTER_VERSION_MAJOR, \
LIBAVFILTER_VERSION_MINOR, \
LIBAVFILTER_VERSION_MICRO)
#define LIBAVFILTER_BUILD LIBAVFILTER_VERSION_INT
#include <stddef.h>
/**
 * Return the LIBAVFILTER_VERSION_INT constant.
 */
unsigned avfilter_version(void);
/**
 * Return the libavfilter build-time configuration.
 */
const char *avfilter_configuration(void);
/**
 * Return the libavfilter license.
 */
const char *avfilter_license(void);
typedef struct AVFilterContext AVFilterContext; ///< opaque forward declaration
typedef struct AVFilterLink AVFilterLink;       ///< opaque forward declaration
typedef struct AVFilterPad AVFilterPad;         ///< opaque forward declaration
/**
 * A reference-counted buffer data type used by the filter system. Filters
 * should not store pointers to this structure directly, but instead use the
 * AVFilterBufferRef structure below.
 */
typedef struct AVFilterBuffer {
    uint8_t *data[8]; ///< buffer data for each plane/channel
    int linesize[8]; ///< number of bytes per line
    unsigned refcount; ///< number of references to this buffer
    /** private data to be used by a custom free function */
    void *priv;
    /**
     * A pointer to the function to deallocate this buffer if the default
     * function is not sufficient. This could, for example, add the memory
     * back into a memory pool to be reused later without the overhead of
     * reallocating it from scratch.
     */
    void (*free)(struct AVFilterBuffer *buf);
    int format; ///< media format
    int w, h; ///< width and height of the allocated buffer
} AVFilterBuffer;
/* Permission flags carried in AVFilterBufferRef.perms. */
#define AV_PERM_READ 0x01 ///< can read from the buffer
#define AV_PERM_WRITE 0x02 ///< can write to the buffer
#define AV_PERM_PRESERVE 0x04 ///< nobody else can overwrite the buffer
#define AV_PERM_REUSE 0x08 ///< can output the buffer multiple times, with the same contents each time
#define AV_PERM_REUSE2 0x10 ///< can output the buffer multiple times, modified each time
#define AV_PERM_NEG_LINESIZES 0x20 ///< the buffer requested can have negative linesizes
/**
 * Audio specific properties in a reference to an AVFilterBuffer. Since
 * AVFilterBufferRef is common to different media formats, audio specific
 * per reference properties must be separated out.
 */
typedef struct AVFilterBufferRefAudioProps {
    int64_t channel_layout; ///< channel layout of audio buffer
    int nb_samples; ///< number of audio samples per channel
    uint32_t sample_rate; ///< audio buffer sample rate
    int planar; ///< audio buffer - planar or packed
} AVFilterBufferRefAudioProps;
/**
 * Video specific properties in a reference to an AVFilterBuffer. Since
 * AVFilterBufferRef is common to different media formats, video specific
 * per reference properties must be separated out.
 */
typedef struct AVFilterBufferRefVideoProps {
    int w; ///< image width
    int h; ///< image height
    AVRational sample_aspect_ratio; ///< sample aspect ratio
    int interlaced; ///< is frame interlaced
    int top_field_first; ///< field order
    enum AVPictureType pict_type; ///< picture type of the frame
    int key_frame; ///< 1 -> keyframe, 0-> not
} AVFilterBufferRefVideoProps;
/**
 * A reference to an AVFilterBuffer. Since filters can manipulate the origin of
 * a buffer to, for example, crop an image without any memcpy, the buffer origin
 * and dimensions are per-reference properties. Linesize is also useful for
 * image flipping, frame to field filters, etc, and so is also per-reference.
 *
 * TODO: add anything necessary for frame reordering
 */
typedef struct AVFilterBufferRef {
    AVFilterBuffer *buf; ///< the buffer that this is a reference to
    uint8_t *data[8]; ///< picture/audio data for each plane
    int linesize[8]; ///< number of bytes per line
    int format; ///< media format
    /**
     * presentation timestamp. The time unit may change during
     * filtering, as it is specified in the link and the filter code
     * may need to rescale the PTS accordingly.
     */
    int64_t pts;
    int64_t pos; ///< byte position in stream, -1 if unknown
    int perms; ///< permissions, see the AV_PERM_* flags
    enum AVMediaType type; ///< media type of buffer data
    AVFilterBufferRefVideoProps *video; ///< video buffer specific properties
    AVFilterBufferRefAudioProps *audio; ///< audio buffer specific properties
} AVFilterBufferRef;
/**
 * Copy properties of src to dst, without copying the actual data.
 */
static inline void avfilter_copy_buffer_ref_props(AVFilterBufferRef *dst, AVFilterBufferRef *src)
{
    /* Media-type independent properties. */
    dst->pts = src->pts;
    dst->pos = src->pos;
    /* Copy the per-media-type property struct by value; other media
       types carry no extra per-reference properties. */
    if (src->type == AVMEDIA_TYPE_VIDEO)
        *dst->video = *src->video;
    else if (src->type == AVMEDIA_TYPE_AUDIO)
        *dst->audio = *src->audio;
}
/**
 * Add a new reference to a buffer.
 *
 * @param ref an existing reference to the buffer
 * @param pmask a bitmask containing the allowable permissions in the new
 * reference
 * @return a new reference to the buffer with the same properties as the
 * old, excluding any permissions denied by pmask
 */
AVFilterBufferRef *avfilter_ref_buffer(AVFilterBufferRef *ref, int pmask);
/**
 * Remove a reference to a buffer. If this is the last reference to the
 * buffer, the buffer itself is also automatically freed.
 *
 * @param ref reference to the buffer, may be NULL
 */
void avfilter_unref_buffer(AVFilterBufferRef *ref);
/**
 * A list of supported formats for one end of a filter link. This is used
 * during the format negotiation process to try to pick the best format to
 * use to minimize the number of necessary conversions. Each filter gives a
 * list of the formats supported by each input and output pad. The list
 * given for each pad need not be distinct - they may be references to the
 * same list of formats, as is often the case when a filter supports multiple
 * formats, but will always output the same format as it is given in input.
 *
 * In this way, a list of possible input formats and a list of possible
 * output formats are associated with each link. When a set of formats is
 * negotiated over a link, the input and output lists are merged to form a
 * new list containing only the common elements of each list. In the case
 * that there were no common elements, a format conversion is necessary.
 * Otherwise, the lists are merged, and all other links which reference
 * either of the format lists involved in the merge are also affected.
 *
 * For example, consider the filter chain:
 * filter (a) --> (b) filter (b) --> (c) filter
 *
 * where the letters in parentheses indicate a list of formats supported on
 * the input or output of the link. Suppose the lists are as follows:
 * (a) = {A, B}
 * (b) = {A, B, C}
 * (c) = {B, C}
 *
 * First, the first link's lists are merged, yielding:
 * filter (a) --> (a) filter (a) --> (c) filter
 *
 * Notice that format list (b) now refers to the same list as filter list (a).
 * Next, the lists for the second link are merged, yielding:
 * filter (a) --> (a) filter (a) --> (a) filter
 *
 * where (a) = {B}.
 *
 * Unfortunately, when the format lists at the two ends of a link are merged,
 * we must ensure that all links which reference either pre-merge format list
 * get updated as well. Therefore, we have the format list structure store a
 * pointer to each of the pointers to itself.
 */
typedef struct AVFilterFormats {
    unsigned format_count; ///< number of formats
    int64_t *formats; ///< list of media formats
    unsigned refcount; ///< number of references to this list
    struct AVFilterFormats ***refs; ///< references to this list
} AVFilterFormats;
/**
 * Create a list of supported formats. This is intended for use in
 * AVFilter->query_formats().
 *
 * @param fmts list of media formats, terminated by -1. If NULL an
 * empty list is created.
 * @return the format list, with no existing references
 */
AVFilterFormats *avfilter_make_format_list(const int *fmts);
/**
 * Same as avfilter_make_format_list(), but for a -1-terminated list of
 * 64-bit format values.
 */
AVFilterFormats *avfilter_make_format64_list(const int64_t *fmts);
/**
 * Add fmt to the list of media formats contained in *avff.
 * If *avff is NULL the function allocates the filter formats struct
 * and puts its pointer in *avff.
 *
 * @return a non negative value in case of success, or a negative
 * value corresponding to an AVERROR code in case of error
 */
int avfilter_add_format(AVFilterFormats **avff, int64_t fmt);
/**
 * Return a list of all formats supported by FFmpeg for the given media type.
 */
AVFilterFormats *avfilter_all_formats(enum AVMediaType type);
/**
 * Return a list of all channel layouts supported by FFmpeg.
 */
AVFilterFormats *avfilter_all_channel_layouts(void);
/**
 * Return a format list which contains the intersection of the formats of
 * a and b. Also, all the references of a, all the references of b, and
 * a and b themselves will be deallocated.
 *
 * If a and b do not share any common formats, neither is modified, and NULL
 * is returned.
 */
AVFilterFormats *avfilter_merge_formats(AVFilterFormats *a, AVFilterFormats *b);
/**
 * Add *ref as a new reference to formats.
 * That is the pointers will point like in the ascii art below:
 * ________
 * |formats |<--------.
 * | ____ | ____|___________________
 * | |refs| | | __|_
 * | |* * | | | | | | AVFilterLink
 * | |* *--------->|*ref|
 * | |____| | | |____|
 * |________| |________________________
 */
void avfilter_formats_ref(AVFilterFormats *formats, AVFilterFormats **ref);
/**
 * If *ref is non-NULL, remove *ref as a reference to the format list
 * it currently points to, deallocates that list if this was the last
 * reference, and sets *ref to NULL.
 *
 * Before After
 * ________ ________ NULL
 * |formats |<--------. |formats | ^
 * | ____ | ____|________________ | ____ | ____|________________
 * | |refs| | | __|_ | |refs| | | __|_
 * | |* * | | | | | | AVFilterLink | |* * | | | | | | AVFilterLink
 * | |* *--------->|*ref| | |* | | | |*ref|
 * | |____| | | |____| | |____| | | |____|
 * |________| |_____________________ |________| |_____________________
 */
void avfilter_formats_unref(AVFilterFormats **ref);
/**
*
* Before After
* ________ ________
* |formats |<---------. |formats |<---------.
* | ____ | ___|___ | ____ | ___|___
* | |refs| | | | | | |refs| | | | | NULL
* | |* *--------->|*oldref| | |* *--------->|*newref| ^
* | |* * | | |_______| | |* * | | |_______| ___|___
* | |____| | | |____| | | | |
* |________| |________| |*oldref|
* |_______|
*/
void avfilter_formats_changeref(AVFilterFormats **oldref,
AVFilterFormats **newref);
/**
 * A filter pad used for either input or output.
 */
struct AVFilterPad {
    /**
     * Pad name. The name is unique among inputs and among outputs, but an
     * input may have the same name as an output. This may be NULL if this
     * pad has no need to ever be referenced by name.
     */
    const char *name;
    /**
     * AVFilterPad media type (see enum AVMediaType). Note that this struct
     * declares callbacks for both video and audio pads below; the type field
     * selects which set is meaningful for a given pad.
     */
    enum AVMediaType type;
    /**
     * Minimum required permissions on incoming buffers. Any buffer with
     * insufficient permissions will be automatically copied by the filter
     * system to a new buffer which provides the needed access permissions.
     *
     * Input pads only.
     */
    int min_perms;
    /**
     * Permissions which are not accepted on incoming buffers. Any buffer
     * which has any of these permissions set will be automatically copied
     * by the filter system to a new buffer which does not have those
     * permissions. This can be used to easily disallow buffers with
     * AV_PERM_REUSE.
     *
     * Input pads only.
     */
    int rej_perms;
    /**
     * Callback called before passing the first slice of a new frame. If
     * NULL, the filter layer will default to storing a reference to the
     * picture inside the link structure.
     *
     * Input video pads only.
     */
    void (*start_frame)(AVFilterLink *link, AVFilterBufferRef *picref);
    /**
     * Callback function to get a video buffer. If NULL, the filter system will
     * use avfilter_default_get_video_buffer().
     *
     * Input video pads only.
     */
    AVFilterBufferRef *(*get_video_buffer)(AVFilterLink *link, int perms, int w, int h);
    /**
     * Callback function to get an audio buffer. If NULL, the filter system will
     * use avfilter_default_get_audio_buffer().
     *
     * Input audio pads only.
     */
    AVFilterBufferRef *(*get_audio_buffer)(AVFilterLink *link, int perms,
                                           enum AVSampleFormat sample_fmt, int nb_samples,
                                           int64_t channel_layout, int planar);
    /**
     * Callback called after the slices of a frame are completely sent. If
     * NULL, the filter layer will default to releasing the reference stored
     * in the link structure during start_frame().
     *
     * Input video pads only.
     */
    void (*end_frame)(AVFilterLink *link);
    /**
     * Slice drawing callback. This is where a filter receives video data
     * and should do its processing.
     *
     * Input video pads only.
     */
    void (*draw_slice)(AVFilterLink *link, int y, int height, int slice_dir);
    /**
     * Samples filtering callback. This is where a filter receives audio data
     * and should do its processing.
     *
     * Input audio pads only.
     */
    void (*filter_samples)(AVFilterLink *link, AVFilterBufferRef *samplesref);
    /**
     * Frame poll callback. This returns the number of immediately available
     * samples. It should return a positive value if the next request_frame()
     * is guaranteed to return one frame (with no delay).
     *
     * Defaults to just calling the source poll_frame() method.
     *
     * Output video pads only.
     * NOTE(review): the text above mentions "samples" while the restriction
     * says video pads; this inconsistency is in the original documentation —
     * confirm against the implementation.
     */
    int (*poll_frame)(AVFilterLink *link);
    /**
     * Frame request callback. A call to this should result in at least one
     * frame being output over the given link. This should return zero on
     * success, and another value on error.
     *
     * Output video pads only.
     */
    int (*request_frame)(AVFilterLink *link);
    /**
     * Link configuration callback.
     *
     * For output pads, this should set the link properties such as
     * width/height. This should NOT set the format property - that is
     * negotiated between filters by the filter system using the
     * query_formats() callback before this function is called.
     *
     * For input pads, this should check the properties of the link, and update
     * the filter's internal state as necessary.
     *
     * For both input and output filters, this should return zero on success,
     * and another value on error.
     */
    int (*config_props)(AVFilterLink *link);
};
/** default handler for start_frame() for video inputs */
void avfilter_default_start_frame(AVFilterLink *link, AVFilterBufferRef *picref);
/** default handler for draw_slice() for video inputs */
void avfilter_default_draw_slice(AVFilterLink *link, int y, int h, int slice_dir);
/** default handler for end_frame() for video inputs */
void avfilter_default_end_frame(AVFilterLink *link);
/** default handler for filter_samples() for audio inputs */
void avfilter_default_filter_samples(AVFilterLink *link, AVFilterBufferRef *samplesref);
/** default handler for config_props() for audio/video outputs */
int avfilter_default_config_output_link(AVFilterLink *link);
/** default handler for config_props() for audio/video inputs */
int avfilter_default_config_input_link (AVFilterLink *link);
/** default handler for get_video_buffer() for video inputs */
AVFilterBufferRef *avfilter_default_get_video_buffer(AVFilterLink *link,
int perms, int w, int h);
/** default handler for get_audio_buffer() for audio inputs */
AVFilterBufferRef *avfilter_default_get_audio_buffer(AVFilterLink *link, int perms,
enum AVSampleFormat sample_fmt, int nb_samples,
int64_t channel_layout, int planar);
/**
* Helpers for query_formats() which set all links to the same list of
* formats/layouts. If there are no links hooked to this filter, the list
* of formats is freed.
*/
void avfilter_set_common_pixel_formats(AVFilterContext *ctx, AVFilterFormats *formats);
void avfilter_set_common_sample_formats(AVFilterContext *ctx, AVFilterFormats *formats);
void avfilter_set_common_channel_layouts(AVFilterContext *ctx, AVFilterFormats *formats);
/** Default handler for query_formats() */
int avfilter_default_query_formats(AVFilterContext *ctx);
/** start_frame() handler for filters which simply pass video along */
void avfilter_null_start_frame(AVFilterLink *link, AVFilterBufferRef *picref);
/** draw_slice() handler for filters which simply pass video along */
void avfilter_null_draw_slice(AVFilterLink *link, int y, int h, int slice_dir);
/** end_frame() handler for filters which simply pass video along */
void avfilter_null_end_frame(AVFilterLink *link);
/** filter_samples() handler for filters which simply pass audio along */
void avfilter_null_filter_samples(AVFilterLink *link, AVFilterBufferRef *samplesref);
/** get_video_buffer() handler for filters which simply pass video along */
AVFilterBufferRef *avfilter_null_get_video_buffer(AVFilterLink *link,
int perms, int w, int h);
/** get_audio_buffer() handler for filters which simply pass audio along */
AVFilterBufferRef *avfilter_null_get_audio_buffer(AVFilterLink *link, int perms,
enum AVSampleFormat sample_fmt, int size,
int64_t channel_layout, int planar);
/**
 * Filter definition. This defines the pads a filter contains, and all the
 * callback functions used to interact with the filter. One AVFilter may be
 * shared by many AVFilterContext instances.
 */
typedef struct AVFilter {
    const char *name;         ///< filter name
    int priv_size;            ///< size of the private data area to allocate per instance (stored in AVFilterContext.priv)
    /**
     * Filter initialization function. Args contains the user-supplied
     * parameters. FIXME: maybe an AVOption-based system would be better?
     * opaque is data provided by the code requesting creation of the filter,
     * and is used to pass data to the filter.
     */
    int (*init)(AVFilterContext *ctx, const char *args, void *opaque);
    /**
     * Filter uninitialization function. Should deallocate any memory held
     * by the filter, release any buffer references, etc. This does not need
     * to deallocate the AVFilterContext->priv memory itself.
     */
    void (*uninit)(AVFilterContext *ctx);
    /**
     * Queries formats/layouts supported by the filter and its pads, and sets
     * the in_formats/in_chlayouts for links connected to its output pads,
     * and out_formats/out_chlayouts for links connected to its input pads.
     *
     * @return zero on success, a negative value corresponding to an
     * AVERROR code otherwise
     */
    int (*query_formats)(AVFilterContext *);
    const AVFilterPad *inputs;  ///< NULL terminated list of inputs. NULL if none
    const AVFilterPad *outputs; ///< NULL terminated list of outputs. NULL if none
    /**
     * A description for the filter. You should use the
     * NULL_IF_CONFIG_SMALL() macro to define it.
     */
    const char *description;
} AVFilter;
/** An instance of a filter. */
struct AVFilterContext {
    const AVClass *av_class;  ///< needed for av_log()
    AVFilter *filter;         ///< the AVFilter of which this is an instance
    char *name;               ///< name of this filter instance
    unsigned input_count;     ///< number of input pads (also the length of both arrays below)
    AVFilterPad *input_pads;  ///< array of input pads
    AVFilterLink **inputs;    ///< array of pointers to input links, parallel to input_pads
    unsigned output_count;    ///< number of output pads (also the length of both arrays below)
    AVFilterPad *output_pads; ///< array of output pads
    AVFilterLink **outputs;   ///< array of pointers to output links, parallel to output_pads
    void *priv;               ///< private data for use by the filter (priv_size bytes, see AVFilter.priv_size)
};
/**
 * A link between two filters. This contains pointers to the source and
 * destination filters between which this link exists, and the indexes of
 * the pads involved. In addition, this link also contains the parameters
 * which have been negotiated and agreed upon between the filters, such as
 * image dimensions, format, etc.
 */
struct AVFilterLink {
    AVFilterContext *src;     ///< source filter
    AVFilterPad *srcpad;      ///< output pad on the source filter
    AVFilterContext *dst;     ///< dest filter
    AVFilterPad *dstpad;      ///< input pad on the dest filter
    /** stage of the initialization of the link properties (dimensions, etc) */
    enum {
        AVLINK_UNINIT = 0,    ///< not started
        AVLINK_STARTINIT,     ///< started, but incomplete
        AVLINK_INIT           ///< complete
    } init_state;
    enum AVMediaType type;    ///< filter media type
    /* These parameters apply only to video */
    int w;                    ///< agreed upon image width
    int h;                    ///< agreed upon image height
    AVRational sample_aspect_ratio; ///< agreed upon sample aspect ratio
    /* These two parameters apply only to audio */
    int64_t channel_layout;   ///< channel layout of current buffer (see libavutil/audioconvert.h)
    int64_t sample_rate;      ///< samples per second
    int format;               ///< agreed upon media format
    /**
     * Lists of formats and channel layouts supported by the input and output
     * filters respectively. These lists are used for negotiating the format
     * to actually be used, which will be loaded into the format and
     * channel_layout members, above, when chosen.
     */
    AVFilterFormats *in_formats;
    AVFilterFormats *out_formats;
    AVFilterFormats *in_chlayouts;
    AVFilterFormats *out_chlayouts;
    /**
     * The buffer reference currently being sent across the link by the source
     * filter. This is used internally by the filter system to allow
     * automatic copying of buffers which do not have sufficient permissions
     * for the destination. This should not be accessed directly by the
     * filters.
     */
    AVFilterBufferRef *src_buf;
    AVFilterBufferRef *cur_buf;
    AVFilterBufferRef *out_buf;
    /**
     * Define the time base used by the PTS of the frames/samples
     * which will pass through this link.
     * During the configuration stage, each filter is supposed to
     * change only the output timebase, while the timebase of the
     * input link is assumed to be an unchangeable property.
     */
    AVRational time_base;
    struct AVFilterPool *pool; ///< internal buffer pool for this link
};
/**
* Link two filters together.
*
* @param src the source filter
* @param srcpad index of the output pad on the source filter
* @param dst the destination filter
* @param dstpad index of the input pad on the destination filter
* @return zero on success
*/
int avfilter_link(AVFilterContext *src, unsigned srcpad,
AVFilterContext *dst, unsigned dstpad);
/**
* Free the link in *link, and set its pointer to NULL.
*/
void avfilter_link_free(AVFilterLink **link);
/**
* Negotiate the media format, dimensions, etc of all inputs to a filter.
*
* @param filter the filter to negotiate the properties for its inputs
* @return zero on successful negotiation
*/
int avfilter_config_links(AVFilterContext *filter);
/**
* Request a picture buffer with a specific set of permissions.
*
* @param link the output link to the filter from which the buffer will
* be requested
* @param perms the required access permissions
* @param w the minimum width of the buffer to allocate
* @param h the minimum height of the buffer to allocate
* @return A reference to the buffer. This must be unreferenced with
* avfilter_unref_buffer when you are finished with it.
*/
AVFilterBufferRef *avfilter_get_video_buffer(AVFilterLink *link, int perms,
int w, int h);
/**
* Create a buffer reference wrapped around an already allocated image
* buffer.
*
* @param data pointers to the planes of the image to reference
* @param linesize linesizes for the planes of the image to reference
* @param perms the required access permissions
* @param w the width of the image specified by the data and linesize arrays
* @param h the height of the image specified by the data and linesize arrays
* @param format the pixel format of the image specified by the data and linesize arrays
*/
AVFilterBufferRef *
avfilter_get_video_buffer_ref_from_arrays(uint8_t * const data[4], const int linesize[4], int perms,
int w, int h, enum PixelFormat format);
/**
* Request an audio samples buffer with a specific set of permissions.
*
* @param link the output link to the filter from which the buffer will
* be requested
* @param perms the required access permissions
* @param sample_fmt the format of each sample in the buffer to allocate
* @param nb_samples the number of samples per channel
* @param channel_layout the number and type of channels per sample in the buffer to allocate
* @param planar audio data layout - planar or packed
* @return A reference to the samples. This must be unreferenced with
* avfilter_unref_buffer when you are finished with it.
*/
AVFilterBufferRef *avfilter_get_audio_buffer(AVFilterLink *link, int perms,
enum AVSampleFormat sample_fmt, int nb_samples,
int64_t channel_layout, int planar);
/**
* Create an audio buffer reference wrapped around an already
* allocated samples buffer.
*
* @param data pointers to the samples plane buffers
* @param linesize linesize for the samples plane buffers
* @param perms the required access permissions
* @param nb_samples number of samples per channel
* @param sample_fmt the format of each sample in the buffer to allocate
* @param channel_layout the channel layout of the buffer
* @param planar audio data layout - planar or packed
*/
AVFilterBufferRef *
avfilter_get_audio_buffer_ref_from_arrays(uint8_t *data[8], int linesize[8], int perms,
int nb_samples, enum AVSampleFormat sample_fmt,
int64_t channel_layout, int planar);
/**
* Request an input frame from the filter at the other end of the link.
*
* @param link the input link
* @return zero on success
*/
int avfilter_request_frame(AVFilterLink *link);
/**
* Poll a frame from the filter chain.
*
* @param link the input link
* @return the number of immediately available frames, a negative
* number in case of error
*/
int avfilter_poll_frame(AVFilterLink *link);
/**
 * Notify the next filter of the start of a frame.
*
* @param link the output link the frame will be sent over
* @param picref A reference to the frame about to be sent. The data for this
* frame need only be valid once draw_slice() is called for that
* portion. The receiving filter will free this reference when
* it no longer needs it.
*/
void avfilter_start_frame(AVFilterLink *link, AVFilterBufferRef *picref);
/**
 * Notify the next filter that the current frame has finished.
*
* @param link the output link the frame was sent over
*/
void avfilter_end_frame(AVFilterLink *link);
/**
* Send a slice to the next filter.
*
* Slices have to be provided in sequential order, either in
* top-bottom or bottom-top order. If slices are provided in
* non-sequential order the behavior of the function is undefined.
*
* @param link the output link over which the frame is being sent
* @param y offset in pixels from the top of the image for this slice
* @param h height of this slice in pixels
* @param slice_dir the assumed direction for sending slices,
* from the top slice to the bottom slice if the value is 1,
* from the bottom slice to the top slice if the value is -1,
* for other values the behavior of the function is undefined.
*/
void avfilter_draw_slice(AVFilterLink *link, int y, int h, int slice_dir);
/**
* Send a buffer of audio samples to the next filter.
*
* @param link the output link over which the audio samples are being sent
* @param samplesref a reference to the buffer of audio samples being sent. The
* receiving filter will free this reference when it no longer
* needs it or pass it on to the next filter.
*/
void avfilter_filter_samples(AVFilterLink *link, AVFilterBufferRef *samplesref);
/** Initialize the filter system. Register all builtin filters. */
void avfilter_register_all(void);
/** Uninitialize the filter system. Unregister all filters. */
void avfilter_uninit(void);
/**
* Register a filter. This is only needed if you plan to use
* avfilter_get_by_name later to lookup the AVFilter structure by name. A
 * filter can still be instantiated with avfilter_open even if it is not
* registered.
*
* @param filter the filter to register
 * @return 0 if the registration was successful, a negative value
* otherwise
*/
int avfilter_register(AVFilter *filter);
/**
* Get a filter definition matching the given name.
*
* @param name the filter name to find
* @return the filter definition, if any matching one is registered.
* NULL if none found.
*/
AVFilter *avfilter_get_by_name(const char *name);
/**
* If filter is NULL, returns a pointer to the first registered filter pointer,
* if filter is non-NULL, returns the next pointer after filter.
* If the returned pointer points to NULL, the last registered filter
* was already reached.
*/
AVFilter **av_filter_next(AVFilter **filter);
/**
* Create a filter instance.
*
* @param filter_ctx put here a pointer to the created filter context
* on success, NULL on failure
* @param filter the filter to create an instance of
* @param inst_name Name to give to the new instance. Can be NULL for none.
* @return >= 0 in case of success, a negative error code otherwise
*/
int avfilter_open(AVFilterContext **filter_ctx, AVFilter *filter, const char *inst_name);
/**
* Initialize a filter.
*
* @param filter the filter to initialize
* @param args A string of parameters to use when initializing the filter.
* The format and meaning of this string varies by filter.
* @param opaque Any extra non-string data needed by the filter. The meaning
* of this parameter varies by filter.
* @return zero on success
*/
int avfilter_init_filter(AVFilterContext *filter, const char *args, void *opaque);
/**
* Free a filter context.
*
* @param filter the filter to free
*/
void avfilter_free(AVFilterContext *filter);
/**
* Insert a filter in the middle of an existing link.
*
* @param link the link into which the filter should be inserted
* @param filt the filter to be inserted
* @param filt_srcpad_idx the input pad on the filter to connect
* @param filt_dstpad_idx the output pad on the filter to connect
* @return zero on success
*/
int avfilter_insert_filter(AVFilterLink *link, AVFilterContext *filt,
unsigned filt_srcpad_idx, unsigned filt_dstpad_idx);
/**
* Insert a new pad.
*
* @param idx Insertion point. Pad is inserted at the end if this point
* is beyond the end of the list of pads.
* @param count Pointer to the number of pads in the list
* @param padidx_off Offset within an AVFilterLink structure to the element
* to increment when inserting a new pad causes link
* numbering to change
* @param pads Pointer to the pointer to the beginning of the list of pads
* @param links Pointer to the pointer to the beginning of the list of links
* @param newpad The new pad to add. A copy is made when adding.
*/
void avfilter_insert_pad(unsigned idx, unsigned *count, size_t padidx_off,
AVFilterPad **pads, AVFilterLink ***links,
AVFilterPad *newpad);
/**
 * Insert a new input pad for the filter at position index.
 * Wraps avfilter_insert_pad() with the bookkeeping specific to input pads.
 */
static inline void avfilter_insert_inpad(AVFilterContext *f, unsigned index,
                                         AVFilterPad *p)
{
    /* Links attached to pads after the insertion point reference their pad
     * through AVFilterLink.dstpad, so that is the field to be adjusted. */
    const size_t pad_off = offsetof(AVFilterLink, dstpad);
    avfilter_insert_pad(index, &f->input_count, pad_off,
                        &f->input_pads, &f->inputs, p);
}
/**
 * Insert a new output pad for the filter at position index.
 * Wraps avfilter_insert_pad() with the bookkeeping specific to output pads.
 */
static inline void avfilter_insert_outpad(AVFilterContext *f, unsigned index,
                                          AVFilterPad *p)
{
    /* Links attached to pads after the insertion point reference their pad
     * through AVFilterLink.srcpad, so that is the field to be adjusted. */
    const size_t pad_off = offsetof(AVFilterLink, srcpad);
    avfilter_insert_pad(index, &f->output_count, pad_off,
                        &f->output_pads, &f->outputs, p);
}
#endif /* AVFILTER_AVFILTER_H */
/*
* Filter graphs
* copyright (c) 2007 Bobby Bingham
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_AVFILTERGRAPH_H
#define AVFILTER_AVFILTERGRAPH_H
#include "avfilter.h"
typedef struct AVFilterGraph {
    unsigned filter_count;     ///< number of filter instances in the graph
    AVFilterContext **filters; ///< array of the filter instances contained in the graph
    char *scale_sws_opts;      ///< sws options to use for the auto-inserted scale filters
} AVFilterGraph;
/**
* Allocate a filter graph.
*/
AVFilterGraph *avfilter_graph_alloc(void);
/**
* Get a filter instance with name name from graph.
*
* @return the pointer to the found filter instance or NULL if it
* cannot be found.
*/
AVFilterContext *avfilter_graph_get_filter(AVFilterGraph *graph, char *name);
/**
* Add an existing filter instance to a filter graph.
*
* @param graphctx the filter graph
* @param filter the filter to be added
*/
int avfilter_graph_add_filter(AVFilterGraph *graphctx, AVFilterContext *filter);
/**
* Create and add a filter instance into an existing graph.
* The filter instance is created from the filter filt and inited
* with the parameters args and opaque.
*
* In case of success put in *filt_ctx the pointer to the created
* filter instance, otherwise set *filt_ctx to NULL.
*
* @param name the instance name to give to the created filter instance
* @param graph_ctx the filter graph
* @return a negative AVERROR error code in case of failure, a non
* negative value otherwise
*/
int avfilter_graph_create_filter(AVFilterContext **filt_ctx, AVFilter *filt,
const char *name, const char *args, void *opaque,
AVFilterGraph *graph_ctx);
/**
* Check validity and configure all the links and formats in the graph.
*
* @param graphctx the filter graph
* @param log_ctx context used for logging
* @return 0 in case of success, a negative AVERROR code otherwise
*/
int avfilter_graph_config(AVFilterGraph *graphctx, void *log_ctx);
/**
* Free a graph, destroy its links, and set *graph to NULL.
* If *graph is NULL, do nothing.
*/
void avfilter_graph_free(AVFilterGraph **graph);
/**
 * A linked-list of the inputs/outputs of the filter chain.
 *
 * This is mainly useful for avfilter_graph_parse(), since this
 * function may accept a description of a graph with unconnected
 * input/output pads. This struct specifies, for each unconnected
 * pad contained in the graph, the filter context and the pad index
 * required for establishing a link.
 */
typedef struct AVFilterInOut {
    /** unique name for this input/output in the list */
    char *name;
    /** filter context associated to this input/output */
    AVFilterContext *filter_ctx;
    /** index of the filter_ctx pad to use for linking */
    int pad_idx;
    /** next input/output in the list, NULL if this is the last */
    struct AVFilterInOut *next;
} AVFilterInOut;
/**
* Create an AVFilterInOut.
* Must be free with avfilter_inout_free().
*/
AVFilterInOut *avfilter_inout_alloc(void);
/**
* Free the AVFilterInOut in *inout, and set its pointer to NULL.
* If *inout is NULL, do nothing.
*/
void avfilter_inout_free(AVFilterInOut **inout);
/**
* Add a graph described by a string to a graph.
*
* @param graph the filter graph where to link the parsed graph context
* @param filters string to be parsed
* @param inputs linked list to the inputs of the graph, may be NULL.
* It is updated to contain the list of open inputs after the parsing,
* should be freed with avfilter_inout_free().
* @param outputs linked list to the outputs of the graph, may be NULL.
* It is updated to contain the list of open outputs after the parsing,
* should be freed with avfilter_inout_free().
* @return zero on success, a negative AVERROR code on error
*/
int avfilter_graph_parse(AVFilterGraph *graph, const char *filters,
AVFilterInOut **inputs, AVFilterInOut **outputs,
void *log_ctx);
#endif /* AVFILTER_AVFILTERGRAPH_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_VSINK_BUFFER_H
#define AVFILTER_VSINK_BUFFER_H
/**
* @file
* memory buffer sink API for video
*/
#include "avfilter.h"
/**
* Tell av_vsink_buffer_get_video_buffer_ref() to read the picref, but not
* remove it from the buffer. This is useful if you need only to read
 * the picref, without consuming (removing) it from the buffer.
*/
#define AV_VSINK_BUF_FLAG_PEEK 1
/**
* Get a video buffer data from buffer_sink and put it in picref.
*
* @param buffer_sink pointer to a buffer sink context
* @param flags a combination of AV_VSINK_BUF_FLAG_* flags
* @return >= 0 in case of success, a negative AVERROR code in case of
* failure
*/
int av_vsink_buffer_get_video_buffer_ref(AVFilterContext *buffer_sink,
AVFilterBufferRef **picref, int flags);
#endif /* AVFILTER_VSINK_BUFFER_H */
/*
* Copyright (c) 2008 Vitor Sessak
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFILTER_VSRC_BUFFER_H
#define AVFILTER_VSRC_BUFFER_H
/**
* @file
* memory buffer source API for video
*/
#include "avfilter.h"
/**
* Tell av_vsrc_buffer_add_video_buffer_ref() to overwrite the already
* cached video buffer with the new added one, otherwise the function
* will complain and exit.
*/
#define AV_VSRC_BUF_FLAG_OVERWRITE 1
/**
* Add video buffer data in picref to buffer_src.
*
* @param buffer_src pointer to a buffer source context
* @param flags a combination of AV_VSRC_BUF_FLAG_* flags
* @return >= 0 in case of success, a negative AVERROR code in case of
* failure
*/
int av_vsrc_buffer_add_video_buffer_ref(AVFilterContext *buffer_src,
AVFilterBufferRef *picref, int flags);
#endif /* AVFILTER_VSRC_BUFFER_H */
/*
* copyright (c) 2001 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_AVFORMAT_H
#define AVFORMAT_AVFORMAT_H
/**
 * Return the LIBAVFORMAT_VERSION_INT constant.
 */
unsigned avformat_version(void);
/**
 * Return the libavformat build-time configuration.
 */
const char *avformat_configuration(void);
/**
 * Return the libavformat license.
 */
const char *avformat_license(void);
#include <time.h>
#include <stdio.h> /* FILE */
#include "libavcodec/avcodec.h"
#include "libavutil/dict.h"
#include "avio.h"
#include "libavformat/version.h"
struct AVFormatContext; /* forward declaration; full definition appears later in this header */
/*
* Public Metadata API.
* The metadata API allows libavformat to export metadata tags to a client
* application using a sequence of key/value pairs. Like all strings in FFmpeg,
* metadata must be stored as UTF-8 encoded Unicode. Note that metadata
* exported by demuxers isn't checked to be valid UTF-8 in most cases.
* Important concepts to keep in mind:
* 1. Keys are unique; there can never be 2 tags with the same key. This is
* also meant semantically, i.e., a demuxer should not knowingly produce
* several keys that are literally different but semantically identical.
* E.g., key=Author5, key=Author6. In this example, all authors must be
* placed in the same tag.
* 2. Metadata is flat, not hierarchical; there are no subtags. If you
* want to store, e.g., the email address of the child of producer Alice
* and actor Bob, that could have key=alice_and_bobs_childs_email_address.
* 3. Several modifiers can be applied to the tag name. This is done by
* appending a dash character ('-') and the modifier name in the order
* they appear in the list below -- e.g. foo-eng-sort, not foo-sort-eng.
* a) language -- a tag whose value is localized for a particular language
* is appended with the ISO 639-2/B 3-letter language code.
* For example: Author-ger=Michael, Author-eng=Mike
* The original/default language is in the unqualified "Author" tag.
* A demuxer should set a default if it sets any translated tag.
* b) sorting -- a modified version of a tag that should be used for
* sorting will have '-sort' appended. E.g. artist="The Beatles",
* artist-sort="Beatles, The".
*
* 4. Demuxers attempt to export metadata in a generic format, however tags
* with no generic equivalents are left as they are stored in the container.
 * What follows is a list of generic tag names:
*
* album -- name of the set this work belongs to
* album_artist -- main creator of the set/album, if different from artist.
* e.g. "Various Artists" for compilation albums.
* artist -- main creator of the work
* comment -- any additional description of the file.
* composer -- who composed the work, if different from artist.
* copyright -- name of copyright holder.
* creation_time-- date when the file was created, preferably in ISO 8601.
* date -- date when the work was created, preferably in ISO 8601.
* disc -- number of a subset, e.g. disc in a multi-disc collection.
* encoder -- name/settings of the software/hardware that produced the file.
* encoded_by -- person/group who created the file.
* filename -- original name of the file.
* genre -- <self-evident>.
* language -- main language in which the work is performed, preferably
* in ISO 639-2 format. Multiple languages can be specified by
* separating them with commas.
* performer -- artist who performed the work, if different from artist.
 * E.g. for "Also sprach Zarathustra", artist would be "Richard
* Strauss" and performer "London Philharmonic Orchestra".
* publisher -- name of the label/publisher.
* service_name -- name of the service in broadcasting (channel name).
* service_provider -- name of the service provider in broadcasting.
* title -- name of the work.
* track -- number of this work in the set, can be in form current/total.
* variant_bitrate -- the total bitrate of the bitrate variant that the current stream is part of
*/
#if FF_API_OLD_METADATA2
/**
 * @defgroup old_metadata Old metadata API
 * The following functions are deprecated, use
 * their equivalents from libavutil/dict.h instead.
 * @{
 */
#define AV_METADATA_MATCH_CASE AV_DICT_MATCH_CASE
#define AV_METADATA_IGNORE_SUFFIX AV_DICT_IGNORE_SUFFIX
#define AV_METADATA_DONT_STRDUP_KEY AV_DICT_DONT_STRDUP_KEY
#define AV_METADATA_DONT_STRDUP_VAL AV_DICT_DONT_STRDUP_VAL
#define AV_METADATA_DONT_OVERWRITE AV_DICT_DONT_OVERWRITE
typedef attribute_deprecated AVDictionary AVMetadata;
typedef attribute_deprecated AVDictionaryEntry AVMetadataTag;
typedef struct AVMetadataConv AVMetadataConv;
/**
 * Get a metadata element with matching key.
 *
 * @param prev Set to the previous matching element to find the next.
 * If set to NULL the first matching element is returned.
 * @param flags Allows case as well as suffix-insensitive comparisons.
 * @return Found tag or NULL, changing key or value leads to undefined behavior.
 * @deprecated Use av_dict_get() instead.
 */
attribute_deprecated AVDictionaryEntry *
av_metadata_get(AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags);
/**
 * Set the given tag in *pm, overwriting an existing tag.
 *
 * @param pm pointer to a pointer to a metadata struct. If *pm is NULL
 * a metadata struct is allocated and put in *pm.
 * @param key tag key to add to *pm (will be av_strduped depending on flags)
 * @param value tag value to add to *pm (will be av_strduped depending on flags).
 * Passing a NULL value will cause an existing tag to be deleted.
 * @return >= 0 on success otherwise an error code <0
 * @deprecated Use av_dict_set() instead.
 */
attribute_deprecated int av_metadata_set2(AVDictionary **pm, const char *key, const char *value, int flags);
/**
 * This function is provided for compatibility reason and currently does nothing.
 */
attribute_deprecated void av_metadata_conv(struct AVFormatContext *ctx, const AVMetadataConv *d_conv,
const AVMetadataConv *s_conv);
/**
 * Copy metadata from one AVDictionary struct into another.
 * @param dst pointer to a pointer to a AVDictionary struct. If *dst is NULL,
 * this function will allocate a struct for you and put it in *dst
 * @param src pointer to source AVDictionary struct
 * @param flags flags to use when setting metadata in *dst
 * @note metadata is read using the AV_DICT_IGNORE_SUFFIX flag
 * @deprecated Use av_dict_copy() instead.
 */
attribute_deprecated void av_metadata_copy(AVDictionary **dst, AVDictionary *src, int flags);
/**
 * Free all the memory allocated for an AVDictionary struct.
 * @deprecated Use av_dict_free() instead.
 */
attribute_deprecated void av_metadata_free(AVDictionary **m);
/**
 * @}
 */
#endif
/* packet functions */
/**
 * Allocate and read the payload of a packet and initialize its
 * fields with default values.
 *
 * @param s associated IO context to read the payload from
 * @param pkt packet
 * @param size desired payload size
 * @return >0 (read size) if OK, AVERROR_xxx otherwise
 */
int av_get_packet(AVIOContext *s, AVPacket *pkt, int size);
/**
 * Read data and append it to the current content of the AVPacket.
 * If pkt->size is 0 this is identical to av_get_packet.
 * Note that this uses av_grow_packet and thus involves a realloc
 * which is inefficient. Thus this function should only be used
 * when there is no reasonable way to know (an upper bound of)
 * the final size.
 *
 * @param s associated IO context to read the data from
 * @param pkt packet
 * @param size amount of data to read
 * @return >0 (read size) if OK, AVERROR_xxx otherwise, previous data
 * will not be lost even if an error occurs.
 */
int av_append_packet(AVIOContext *s, AVPacket *pkt, int size);
/*************************************************/
/* fractional numbers for exact pts handling */
/**
 * The exact value of the fractional number is: 'val + num / den'.
 * num is assumed to be 0 <= num < den.
 */
typedef struct AVFrac {
int64_t val, num, den; ///< integer part, numerator and denominator
} AVFrac;
/*************************************************/
/* input/output formats */
struct AVCodecTag; /* opaque; maps codec IDs to container-specific tags */
/**
 * This structure contains the data a format has to probe a file.
 */
typedef struct AVProbeData {
const char *filename; ///< name of the file being probed (may be used for extension matching)
unsigned char *buf; /**< Buffer must have AVPROBE_PADDING_SIZE of extra allocated bytes filled with zero. */
int buf_size; /**< Size of buf except extra allocated bytes */
} AVProbeData;
#define AVPROBE_SCORE_MAX 100 ///< maximum score, half of that is used for file-extension-based detection
#define AVPROBE_PADDING_SIZE 32 ///< extra allocated bytes at the end of the probe buffer
typedef struct AVFormatParameters {
#if FF_API_FORMAT_PARAMETERS
/* All fields below are deprecated; use demuxer-specific private options instead. */
attribute_deprecated AVRational time_base;
attribute_deprecated int sample_rate;
attribute_deprecated int channels;
attribute_deprecated int width;
attribute_deprecated int height;
attribute_deprecated enum PixelFormat pix_fmt;
attribute_deprecated int channel; /**< Used to select DV channel. */
attribute_deprecated const char *standard; /**< deprecated, use demuxer-specific options instead. */
attribute_deprecated unsigned int mpeg2ts_raw:1; /**< deprecated, use mpegtsraw demuxer */
/**< deprecated, use mpegtsraw demuxer-specific options instead */
attribute_deprecated unsigned int mpeg2ts_compute_pcr:1;
attribute_deprecated unsigned int initial_pause:1; /**< Do not begin to play the stream
immediately (RTSP only). */
attribute_deprecated unsigned int prealloced_context:1;
#endif
} AVFormatParameters;
/* Flags describing (de)muxer capabilities; stored in AVInputFormat.flags
 * and AVOutputFormat.flags. */
//! Demuxer will use avio_open, no opened file should be provided by the caller.
#define AVFMT_NOFILE 0x0001
#define AVFMT_NEEDNUMBER 0x0002 /**< Needs '%d' in filename. */
#define AVFMT_SHOW_IDS 0x0008 /**< Show format stream IDs numbers. */
#define AVFMT_RAWPICTURE 0x0020 /**< Format wants AVPicture structure for
raw picture data. */
#define AVFMT_GLOBALHEADER 0x0040 /**< Format wants global header. */
#define AVFMT_NOTIMESTAMPS 0x0080 /**< Format does not need / have any timestamps. */
#define AVFMT_GENERIC_INDEX 0x0100 /**< Use generic index building code. */
#define AVFMT_TS_DISCONT 0x0200 /**< Format allows timestamp discontinuities. Note, muxers always require valid (monotone) timestamps */
#define AVFMT_VARIABLE_FPS 0x0400 /**< Format allows variable fps. */
#define AVFMT_NODIMENSIONS 0x0800 /**< Format does not need width/height */
#define AVFMT_NOSTREAMS 0x1000 /**< Format does not require any streams */
#define AVFMT_NOBINSEARCH 0x2000 /**< Format does not allow to fallback to binary search via read_timestamp */
#define AVFMT_NOGENSEARCH 0x4000 /**< Format does not allow to fallback to generic search */
#define AVFMT_TS_NONSTRICT 0x8000 /**< Format does not require strictly
increasing timestamps, but they must
still be monotonic */
typedef struct AVOutputFormat {
const char *name; ///< short name of the format
/**
 * Descriptive name for the format, meant to be more human-readable
 * than name. You should use the NULL_IF_CONFIG_SMALL() macro
 * to define it.
 */
const char *long_name;
const char *mime_type;
const char *extensions; /**< comma-separated filename extensions */
/**
 * size of private data so that it can be allocated in the wrapper
 */
int priv_data_size;
/* output support */
enum CodecID audio_codec; /**< default audio codec */
enum CodecID video_codec; /**< default video codec */
int (*write_header)(struct AVFormatContext *);
int (*write_packet)(struct AVFormatContext *, AVPacket *pkt);
int (*write_trailer)(struct AVFormatContext *);
/**
 * can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER, AVFMT_RAWPICTURE,
 * AVFMT_GLOBALHEADER, AVFMT_NOTIMESTAMPS, AVFMT_VARIABLE_FPS,
 * AVFMT_NODIMENSIONS, AVFMT_NOSTREAMS
 */
int flags;
void *dummy; /* NOTE(review): purpose not evident from this header — confirm before use */
int (*interleave_packet)(struct AVFormatContext *, AVPacket *out,
AVPacket *in, int flush);
/**
 * List of supported codec_id-codec_tag pairs, ordered by "better
 * choice first". The arrays are all terminated by CODEC_ID_NONE.
 */
const struct AVCodecTag * const *codec_tag;
enum CodecID subtitle_codec; /**< default subtitle codec */
#if FF_API_OLD_METADATA2
const AVMetadataConv *metadata_conv;
#endif
const AVClass *priv_class; ///< AVClass for the private context
/* private fields */
struct AVOutputFormat *next; ///< next registered output format (linked list)
} AVOutputFormat;
typedef struct AVInputFormat {
/**
 * A comma separated list of short names for the format. New names
 * may be appended with a minor bump.
 */
const char *name;
/**
 * Descriptive name for the format, meant to be more human-readable
 * than name. You should use the NULL_IF_CONFIG_SMALL() macro
 * to define it.
 */
const char *long_name;
/**
 * Size of private data so that it can be allocated in the wrapper.
 */
int priv_data_size;
/**
 * Tell if a given file has a chance of being parsed as this format.
 * The buffer provided is guaranteed to be AVPROBE_PADDING_SIZE bytes
 * big so you do not have to check for that unless you need more.
 */
int (*read_probe)(AVProbeData *);
/**
 * Read the format header and initialize the AVFormatContext
 * structure. Return 0 if OK. 'ap' if non-NULL contains
 * additional parameters. Only used in raw format right
 * now. 'av_new_stream' should be called to create new streams.
 */
int (*read_header)(struct AVFormatContext *,
AVFormatParameters *ap);
/**
 * Read one packet and put it in 'pkt'. pts and flags are also
 * set. 'av_new_stream' can be called only if the flag
 * AVFMTCTX_NOHEADER is used and only in the calling thread (not in a
 * background thread).
 * @return 0 on success, < 0 on error.
 * When returning an error, pkt must not have been allocated
 * or must be freed before returning
 */
int (*read_packet)(struct AVFormatContext *, AVPacket *pkt);
/**
 * Close the stream. The AVFormatContext and AVStreams are not
 * freed by this function
 */
int (*read_close)(struct AVFormatContext *);
#if FF_API_READ_SEEK
/**
 * Seek to a given timestamp relative to the frames in
 * stream component stream_index.
 * @param stream_index Must not be -1.
 * @param flags Selects which direction should be preferred if no exact
 * match is available.
 * @return >= 0 on success (but not necessarily the new offset)
 */
attribute_deprecated int (*read_seek)(struct AVFormatContext *,
int stream_index, int64_t timestamp, int flags);
#endif
/**
 * Gets the next timestamp in stream[stream_index].time_base units.
 * @return the timestamp or AV_NOPTS_VALUE if an error occurred
 */
int64_t (*read_timestamp)(struct AVFormatContext *s, int stream_index,
int64_t *pos, int64_t pos_limit);
/**
 * Can use flags: AVFMT_NOFILE, AVFMT_NEEDNUMBER.
 */
int flags;
/**
 * If extensions are defined, then no probe is done. You should
 * usually not use extension format guessing because it is not
 * reliable enough
 */
const char *extensions;
/**
 * General purpose read-only value that the format can use.
 */
int value;
/**
 * Start/resume playing - only meaningful if using a network-based format
 * (RTSP).
 */
int (*read_play)(struct AVFormatContext *);
/**
 * Pause playing - only meaningful if using a network-based format
 * (RTSP).
 */
int (*read_pause)(struct AVFormatContext *);
const struct AVCodecTag * const *codec_tag; ///< supported codec_id-codec_tag pairs, terminated by CODEC_ID_NONE
/**
 * Seek to timestamp ts.
 * Seeking will be done so that the point from which all active streams
 * can be presented successfully will be closest to ts and within min/max_ts.
 * Active streams are all streams that have AVStream.discard < AVDISCARD_ALL.
 */
int (*read_seek2)(struct AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags);
#if FF_API_OLD_METADATA2
const AVMetadataConv *metadata_conv;
#endif
const AVClass *priv_class; ///< AVClass for the private context
/* private fields */
struct AVInputFormat *next; ///< next registered input format (linked list)
} AVInputFormat;
/** How much parsing a stream's packets need before being returned. */
enum AVStreamParseType {
AVSTREAM_PARSE_NONE,
AVSTREAM_PARSE_FULL, /**< full parsing and repack */
AVSTREAM_PARSE_HEADERS, /**< Only parse headers, do not repack. */
AVSTREAM_PARSE_TIMESTAMPS, /**< full parsing and interpolation of timestamps for frames not starting on a packet boundary */
AVSTREAM_PARSE_FULL_ONCE, /**< full parsing and repack of the first frame only, only implemented for H.264 currently */
};
/** One entry of a stream's seeking index (see AVStream.index_entries). */
typedef struct AVIndexEntry {
int64_t pos; ///< byte position of the indexed frame in the file
int64_t timestamp; ///< timestamp of the indexed frame, in AVStream.time_base units
#define AVINDEX_KEYFRAME 0x0001
int flags:2;
int size:30; //Yeah, trying to keep the size of this small to reduce memory requirements (it is 24 vs. 32 bytes due to possible 8-byte alignment).
int min_distance; /**< Minimum distance between this and the previous keyframe, used to avoid unneeded searching. */
} AVIndexEntry;
/* AVStream.disposition bit flags. */
#define AV_DISPOSITION_DEFAULT 0x0001
#define AV_DISPOSITION_DUB 0x0002
#define AV_DISPOSITION_ORIGINAL 0x0004
#define AV_DISPOSITION_COMMENT 0x0008
#define AV_DISPOSITION_LYRICS 0x0010
#define AV_DISPOSITION_KARAOKE 0x0020
/**
 * Track should be used during playback by default.
 * Useful for subtitle track that should be displayed
 * even when user did not explicitly ask for subtitles.
 */
#define AV_DISPOSITION_FORCED 0x0040
#define AV_DISPOSITION_HEARING_IMPAIRED 0x0080 /**< stream for hearing impaired audiences */
#define AV_DISPOSITION_VISUAL_IMPAIRED 0x0100 /**< stream for visual impaired audiences */
#define AV_DISPOSITION_CLEAN_EFFECTS 0x0200 /**< stream without voice */
/**
 * Stream structure.
 * New fields can be added to the end with minor version bumps.
 * Removal, reordering and changes to existing fields require a major
 * version bump.
 * sizeof(AVStream) must not be used outside libav*.
 */
typedef struct AVStream {
int index; /**< stream index in AVFormatContext */
int id; /**< format-specific stream ID */
AVCodecContext *codec; /**< codec context */
/**
 * Real base framerate of the stream.
 * This is the lowest framerate with which all timestamps can be
 * represented accurately (it is the least common multiple of all
 * framerates in the stream). Note, this value is just a guess!
 * For example, if the time base is 1/90000 and all frames have either
 * approximately 3600 or 1800 timer ticks, then r_frame_rate will be 50/1.
 */
AVRational r_frame_rate;
void *priv_data; ///< (de)muxer-private data
/* internal data used in av_find_stream_info() */
int64_t first_dts;
/**
 * encoding: pts generation when outputting stream
 */
struct AVFrac pts;
/**
 * This is the fundamental unit of time (in seconds) in terms
 * of which frame timestamps are represented. For fixed-fps content,
 * time base should be 1/framerate and timestamp increments should be 1.
 * decoding: set by libavformat
 * encoding: set by libavformat in av_write_header
 */
AVRational time_base;
int pts_wrap_bits; /**< number of bits in pts (used for wrapping control) */
/* ffmpeg.c private use */
int stream_copy; /**< If set, just copy stream. */
enum AVDiscard discard; ///< Selects which packets can be discarded at will and do not need to be demuxed.
//FIXME move stuff to a flags field?
/**
 * Quality, as it has been removed from AVCodecContext and put in AVVideoFrame.
 * MN: dunno if that is the right place for it
 */
float quality;
/**
 * Decoding: pts of the first frame of the stream, in stream time base.
 * Only set this if you are absolutely 100% sure that the value you set
 * it to really is the pts of the first frame.
 * This may be undefined (AV_NOPTS_VALUE).
 * @note The ASF header does NOT contain a correct start_time the ASF
 * demuxer must NOT set this.
 */
int64_t start_time;
/**
 * Decoding: duration of the stream, in stream time base.
 * If a source file does not specify a duration, but does specify
 * a bitrate, this value will be estimated from bitrate and file size.
 */
int64_t duration;
/* av_read_frame() support */
enum AVStreamParseType need_parsing;
struct AVCodecParserContext *parser;
int64_t cur_dts;
int last_IP_duration;
int64_t last_IP_pts;
/* av_seek_frame() support */
AVIndexEntry *index_entries; /**< Only used if the format does not
support seeking natively. */
int nb_index_entries;
unsigned int index_entries_allocated_size;
int64_t nb_frames; ///< number of frames in this stream if known or 0
int disposition; /**< AV_DISPOSITION_* bit field */
AVProbeData probe_data;
#define MAX_REORDER_DELAY 16
int64_t pts_buffer[MAX_REORDER_DELAY+1];
/**
 * sample aspect ratio (0 if unknown)
 * - encoding: Set by user.
 * - decoding: Set by libavformat.
 */
AVRational sample_aspect_ratio;
AVDictionary *metadata;
/* Intended mostly for av_read_frame() support. Not supposed to be used by */
/* external applications; try to use something else if at all possible. */
const uint8_t *cur_ptr;
int cur_len;
AVPacket cur_pkt;
// Timestamp generation support:
/**
 * Timestamp corresponding to the last dts sync point.
 *
 * Initialized when AVCodecParserContext.dts_sync_point >= 0 and
 * a DTS is received from the underlying container. Otherwise set to
 * AV_NOPTS_VALUE by default.
 */
int64_t reference_dts;
/**
 * Number of packets to buffer for codec probing
 * NOT PART OF PUBLIC API
 */
#define MAX_PROBE_PACKETS 2500
int probe_packets;
/**
 * last packet in packet_buffer for this stream when muxing.
 * used internally, NOT PART OF PUBLIC API, don't read or write from outside of libav*
 */
struct AVPacketList *last_in_packet_buffer;
/**
 * Average framerate
 */
AVRational avg_frame_rate;
/**
 * Number of frames that have been demuxed during av_find_stream_info()
 */
int codec_info_nb_frames;
/**
 * Stream Identifier
 * This is the MPEG-TS stream identifier +1
 * 0 means unknown
 */
int stream_identifier;
/**
 * Stream information used internally by av_find_stream_info()
 */
#define MAX_STD_TIMEBASES (60*12+5)
struct {
int64_t last_dts;
int64_t duration_gcd;
int duration_count;
double duration_error[MAX_STD_TIMEBASES];
int64_t codec_info_duration;
} *info;
/**
 * flag to indicate that probing is requested
 * NOT PART OF PUBLIC API
 */
int request_probe;
} AVStream;
#define AV_PROGRAM_RUNNING 1
/**
 * New fields can be added to the end with minor version bumps.
 * Removal, reordering and changes to existing fields require a major
 * version bump.
 * sizeof(AVProgram) must not be used outside libav*.
 */
typedef struct AVProgram {
int id;
int flags; ///< AV_PROGRAM_* flags
enum AVDiscard discard; ///< selects which program to discard and which to feed to the caller
unsigned int *stream_index; ///< indices (into AVFormatContext.streams) of this program's streams
unsigned int nb_stream_indexes; ///< number of entries in stream_index
AVDictionary *metadata;
int program_num;
int pmt_pid; ///< MPEG-TS program map table PID
int pcr_pid; ///< MPEG-TS program clock reference PID
} AVProgram;
#define AVFMTCTX_NOHEADER 0x0001 /**< signal that no header is present
(streams are added dynamically) */
typedef struct AVChapter {
int id; ///< unique ID to identify the chapter
AVRational time_base; ///< time base in which the start/end timestamps are specified
int64_t start, end; ///< chapter start/end time in time_base units
AVDictionary *metadata;
} AVChapter;
/**
 * Format I/O context.
 * New fields can be added to the end with minor version bumps.
 * Removal, reordering and changes to existing fields require a major
 * version bump.
 * sizeof(AVFormatContext) must not be used outside libav*.
 */
typedef struct AVFormatContext {
const AVClass *av_class; /**< Set by avformat_alloc_context. */
/* Can only be iformat or oformat, not both at the same time. */
struct AVInputFormat *iformat;
struct AVOutputFormat *oformat;
void *priv_data; ///< (de)muxer-private data
AVIOContext *pb; ///< I/O context
unsigned int nb_streams;
AVStream **streams;
char filename[1024]; /**< input or output filename */
/* stream info */
int64_t timestamp;
int ctx_flags; /**< Format-specific flags, see AVFMTCTX_xx */
/* private data for pts handling (do not modify directly). */
/**
 * This buffer is only needed when packets were already buffered but
 * not decoded, for example to get the codec parameters in MPEG
 * streams.
 */
struct AVPacketList *packet_buffer;
/**
 * Decoding: position of the first frame of the component, in
 * AV_TIME_BASE fractional seconds. NEVER set this value directly:
 * It is deduced from the AVStream values.
 */
int64_t start_time;
/**
 * Decoding: duration of the stream, in AV_TIME_BASE fractional
 * seconds. Only set this value if you know none of the individual stream
 * durations and also don't set any of them. This is deduced from the
 * AVStream values if not set.
 */
int64_t duration;
/**
 * decoding: total file size, 0 if unknown
 */
int64_t file_size;
/**
 * Decoding: total stream bitrate in bit/s, 0 if not
 * available. Never set it directly if the file_size and the
 * duration are known as FFmpeg can compute it automatically.
 */
int bit_rate;
/* av_read_frame() support */
AVStream *cur_st;
/* av_seek_frame() support */
int64_t data_offset; /**< offset of the first packet */
int mux_rate;
unsigned int packet_size;
int preload;
int max_delay;
#define AVFMT_NOOUTPUTLOOP -1
#define AVFMT_INFINITEOUTPUTLOOP 0
/**
 * number of times to loop output in formats that support it
 */
int loop_output;
int flags; ///< combination of AVFMT_FLAG_* values below
#define AVFMT_FLAG_GENPTS 0x0001 ///< Generate missing pts even if it requires parsing future frames.
#define AVFMT_FLAG_IGNIDX 0x0002 ///< Ignore index.
#define AVFMT_FLAG_NONBLOCK 0x0004 ///< Do not block when reading packets from input.
#define AVFMT_FLAG_IGNDTS 0x0008 ///< Ignore DTS on frames that contain both DTS & PTS
#define AVFMT_FLAG_NOFILLIN 0x0010 ///< Do not infer any values from other values, just return what is stored in the container
#define AVFMT_FLAG_NOPARSE 0x0020 ///< Do not use AVParsers, you also must set AVFMT_FLAG_NOFILLIN as the fillin code works on frames and no parsing -> no frames. Also seeking to frames can not work if parsing to find frame boundaries has been disabled
#if FF_API_FLAG_RTP_HINT
#define AVFMT_FLAG_RTP_HINT 0x0040 ///< Deprecated, use the -movflags rtphint muxer specific AVOption instead
#endif
#define AVFMT_FLAG_CUSTOM_IO 0x0080 ///< The caller has supplied a custom AVIOContext, don't avio_close() it.
#define AVFMT_FLAG_MP4A_LATM 0x8000 ///< Enable RTP MP4A-LATM payload
#define AVFMT_FLAG_SORT_DTS 0x10000 ///< try to interleave outputted packets by dts (using this flag can slow demuxing down)
#define AVFMT_FLAG_PRIV_OPT 0x20000 ///< Enable use of private options by delaying codec open (this could be made default once all code is converted)
#define AVFMT_FLAG_KEEP_SIDE_DATA 0x40000 ///< Don't merge side data but keep it separate.
int loop_input;
/**
 * decoding: size of data to probe; encoding: unused.
 */
unsigned int probesize;
/**
 * Maximum time (in AV_TIME_BASE units) during which the input should
 * be analyzed in av_find_stream_info().
 */
int max_analyze_duration;
const uint8_t *key;
int keylen;
unsigned int nb_programs;
AVProgram **programs;
/**
 * Forced video codec_id.
 * Demuxing: Set by user.
 */
enum CodecID video_codec_id;
/**
 * Forced audio codec_id.
 * Demuxing: Set by user.
 */
enum CodecID audio_codec_id;
/**
 * Forced subtitle codec_id.
 * Demuxing: Set by user.
 */
enum CodecID subtitle_codec_id;
/**
 * Maximum amount of memory in bytes to use for the index of each stream.
 * If the index exceeds this size, entries will be discarded as
 * needed to maintain a smaller size. This can lead to slower or less
 * accurate seeking (depends on demuxer).
 * Demuxers for which a full in-memory index is mandatory will ignore
 * this.
 * muxing : unused
 * demuxing: set by user
 */
unsigned int max_index_size;
/**
 * Maximum amount of memory in bytes to use for buffering frames
 * obtained from realtime capture devices.
 */
unsigned int max_picture_buffer;
unsigned int nb_chapters;
AVChapter **chapters;
/**
 * Flags to enable debugging.
 */
int debug;
#define FF_FDEBUG_TS 0x0001
/**
 * Raw packets from the demuxer, prior to parsing and decoding.
 * This buffer is used for buffering packets until the codec can
 * be identified, as parsing cannot be done without knowing the
 * codec.
 */
struct AVPacketList *raw_packet_buffer;
struct AVPacketList *raw_packet_buffer_end;
struct AVPacketList *packet_buffer_end;
AVDictionary *metadata;
/**
 * Remaining size available for raw_packet_buffer, in bytes.
 * NOT PART OF PUBLIC API
 */
#define RAW_PACKET_BUFFER_SIZE 2500000
int raw_packet_buffer_remaining_size;
/**
 * Start time of the stream in real world time, in microseconds
 * since the unix epoch (00:00 1st January 1970). That is, pts=0
 * in the stream was captured at this real world time.
 * - encoding: Set by user.
 * - decoding: Unused.
 */
int64_t start_time_realtime;
/**
 * decoding: number of frames used to probe fps
 */
int fps_probe_size;
/**
 * Transport stream id.
 * This will be moved into demuxer private options. Thus no API/ABI compatibility
 */
int ts_id;
} AVFormatContext;
/** Singly linked list node used for packet buffering. */
typedef struct AVPacketList {
AVPacket pkt;
struct AVPacketList *next;
} AVPacketList;
/**
 * If f is NULL, returns the first registered input format,
 * if f is non-NULL, returns the next registered input format after f
 * or NULL if f is the last one.
 */
AVInputFormat *av_iformat_next(AVInputFormat *f);
/**
 * If f is NULL, returns the first registered output format,
 * if f is non-NULL, returns the next registered output format after f
 * or NULL if f is the last one.
 */
AVOutputFormat *av_oformat_next(AVOutputFormat *f);
#if FF_API_GUESS_IMG2_CODEC
attribute_deprecated enum CodecID av_guess_image2_codec(const char *filename);
#endif
/* XXX: Use automatic init with either ELF sections or C file parser */
/* modules. */
/* utils.c */
/** Register format as an available input (demuxing) format. */
void av_register_input_format(AVInputFormat *format);
/** Register format as an available output (muxing) format. */
void av_register_output_format(AVOutputFormat *format);
/**
 * Return the output format in the list of registered output formats
 * which best matches the provided parameters, or return NULL if
 * there is no match.
 *
 * @param short_name if non-NULL checks if short_name matches with the
 * names of the registered formats
 * @param filename if non-NULL checks if filename terminates with the
 * extensions of the registered formats
 * @param mime_type if non-NULL checks if mime_type matches with the
 * MIME type of the registered formats
 */
AVOutputFormat *av_guess_format(const char *short_name,
const char *filename,
const char *mime_type);
/**
 * Guess the codec ID based upon muxer and filename.
 */
enum CodecID av_guess_codec(AVOutputFormat *fmt, const char *short_name,
const char *filename, const char *mime_type,
enum AVMediaType type);
/**
 * Send a nice hexadecimal dump of a buffer to the specified file stream.
 *
 * @param f The file stream pointer where the dump should be sent to.
 * @param buf buffer
 * @param size buffer size
 *
 * @see av_hex_dump_log, av_pkt_dump2, av_pkt_dump_log2
 */
void av_hex_dump(FILE *f, uint8_t *buf, int size);
/**
 * Send a nice hexadecimal dump of a buffer to the log.
 *
 * @param avcl A pointer to an arbitrary struct of which the first field is a
 * pointer to an AVClass struct.
 * @param level The importance level of the message, lower values signifying
 * higher importance.
 * @param buf buffer
 * @param size buffer size
 *
 * @see av_hex_dump, av_pkt_dump2, av_pkt_dump_log2
 */
void av_hex_dump_log(void *avcl, int level, uint8_t *buf, int size);
/**
 * Send a nice dump of a packet to the specified file stream.
 *
 * @param f The file stream pointer where the dump should be sent to.
 * @param pkt packet to dump
 * @param dump_payload True if the payload must be displayed, too.
 * @param st AVStream that the packet belongs to
 */
void av_pkt_dump2(FILE *f, AVPacket *pkt, int dump_payload, AVStream *st);
/**
 * Send a nice dump of a packet to the log.
 *
 * @param avcl A pointer to an arbitrary struct of which the first field is a
 * pointer to an AVClass struct.
 * @param level The importance level of the message, lower values signifying
 * higher importance.
 * @param pkt packet to dump
 * @param dump_payload True if the payload must be displayed, too.
 * @param st AVStream that the packet belongs to
 */
void av_pkt_dump_log2(void *avcl, int level, AVPacket *pkt, int dump_payload,
AVStream *st);
#if FF_API_PKT_DUMP
/** @deprecated Use av_pkt_dump2() / av_pkt_dump_log2() instead. */
attribute_deprecated void av_pkt_dump(FILE *f, AVPacket *pkt, int dump_payload);
attribute_deprecated void av_pkt_dump_log(void *avcl, int level, AVPacket *pkt,
int dump_payload);
#endif
/**
 * Initialize libavformat and register all the muxers, demuxers and
 * protocols. If you do not call this function, then you can select
 * exactly which formats you want to support.
 *
 * @see av_register_input_format()
 * @see av_register_output_format()
 * @see av_register_protocol()
 */
void av_register_all(void);
/**
 * Get the CodecID for the given codec tag tag.
 * If no codec id is found returns CODEC_ID_NONE.
 *
 * @param tags list of supported codec_id-codec_tag pairs, as stored
 * in AVInputFormat.codec_tag and AVOutputFormat.codec_tag
 */
enum CodecID av_codec_get_id(const struct AVCodecTag * const *tags, unsigned int tag);
/**
 * Get the codec tag for the given codec id id.
 * If no codec tag is found returns 0.
 *
 * @param tags list of supported codec_id-codec_tag pairs, as stored
 * in AVInputFormat.codec_tag and AVOutputFormat.codec_tag
 */
unsigned int av_codec_get_tag(const struct AVCodecTag * const *tags, enum CodecID id);
/* media file input */
/**
* Find AVInputFormat based on the short name of the input format.
*/
AVInputFormat *av_find_input_format(const char *short_name);
/**
* Guess the file format.
*
* @param is_opened Whether the file is already opened; determines whether
* demuxers with or without AVFMT_NOFILE are probed.
*/
AVInputFormat *av_probe_input_format(AVProbeData *pd, int is_opened);
/**
* Guess the file format.
*
* @param is_opened Whether the file is already opened; determines whether
* demuxers with or without AVFMT_NOFILE are probed.
 * @param score_max A probe score larger than this is required to accept a
* detection, the variable is set to the actual detection
* score afterwards.
* If the score is <= AVPROBE_SCORE_MAX / 4 it is recommended
* to retry with a larger probe buffer.
*/
AVInputFormat *av_probe_input_format2(AVProbeData *pd, int is_opened, int *score_max);
/**
* Guess the file format.
*
* @param is_opened Whether the file is already opened; determines whether
* demuxers with or without AVFMT_NOFILE are probed.
* @param score_ret The score of the best detection.
*/
AVInputFormat *av_probe_input_format3(AVProbeData *pd, int is_opened, int *score_ret);
/**
* Probe a bytestream to determine the input format. Each time a probe returns
* with a score that is too low, the probe buffer size is increased and another
* attempt is made. When the maximum probe size is reached, the input format
* with the highest score is returned.
*
* @param pb the bytestream to probe
* @param fmt the input format is put here
* @param filename the filename of the stream
* @param logctx the log context
* @param offset the offset within the bytestream to probe from
* @param max_probe_size the maximum probe buffer size (zero for default)
* @return 0 in case of success, a negative value corresponding to an
* AVERROR code otherwise
*/
int av_probe_input_buffer(AVIOContext *pb, AVInputFormat **fmt,
const char *filename, void *logctx,
unsigned int offset, unsigned int max_probe_size);
#if FF_API_FORMAT_PARAMETERS
/**
* Allocate all the structures needed to read an input stream.
* This does not open the needed codecs for decoding the stream[s].
* @deprecated use avformat_open_input instead.
*/
attribute_deprecated int av_open_input_stream(AVFormatContext **ic_ptr,
AVIOContext *pb, const char *filename,
AVInputFormat *fmt, AVFormatParameters *ap);
/**
* Open a media file as input. The codecs are not opened. Only the file
* header (if present) is read.
*
* @param ic_ptr The opened media file handle is put here.
* @param filename filename to open
* @param fmt If non-NULL, force the file format to use.
* @param buf_size optional buffer size (zero if default is OK)
* @param ap Additional parameters needed when opening the file
* (NULL if default).
* @return 0 if OK, AVERROR_xxx otherwise
*
* @deprecated use avformat_open_input instead.
*/
attribute_deprecated int av_open_input_file(AVFormatContext **ic_ptr, const char *filename,
AVInputFormat *fmt,
int buf_size,
AVFormatParameters *ap);
#endif
/**
* Open an input stream and read the header. The codecs are not opened.
* The stream must be closed with av_close_input_file().
*
* @param ps Pointer to user-supplied AVFormatContext (allocated by avformat_alloc_context).
* May be a pointer to NULL, in which case an AVFormatContext is allocated by this
* function and written into ps.
* Note that a user-supplied AVFormatContext will be freed on failure.
* @param filename Name of the stream to open.
* @param fmt If non-NULL, this parameter forces a specific input format.
* Otherwise the format is autodetected.
* @param options A dictionary filled with AVFormatContext and demuxer-private options.
* On return this parameter will be destroyed and replaced with a dict containing
* options that were not found. May be NULL.
*
* @return 0 on success, a negative AVERROR on failure.
*
* @note If you want to use custom IO, preallocate the format context and set its pb field.
*/
int avformat_open_input(AVFormatContext **ps, const char *filename, AVInputFormat *fmt, AVDictionary **options);
/**
 * NOTE(review): undocumented entry point taking the deprecated
 * AVFormatParameters; presumably completes demuxer setup after
 * avformat_open_input() — confirm intended usage before relying on it.
 */
int av_demuxer_open(AVFormatContext *ic, AVFormatParameters *ap);
/**
* Allocate an AVFormatContext.
* avformat_free_context() can be used to free the context and everything
* allocated by the framework within it.
*/
AVFormatContext *avformat_alloc_context(void);
#if FF_API_ALLOC_OUTPUT_CONTEXT
/**
* @deprecated deprecated in favor of avformat_alloc_output_context2()
*/
attribute_deprecated
AVFormatContext *avformat_alloc_output_context(const char *format,
AVOutputFormat *oformat,
const char *filename);
#endif
/**
* Allocate an AVFormatContext for an output format.
* avformat_free_context() can be used to free the context and
* everything allocated by the framework within it.
*
* @param *ctx is set to the created format context, or to NULL in
* case of failure
* @param oformat format to use for allocating the context, if NULL
* format_name and filename are used instead
* @param format_name the name of output format to use for allocating the
* context, if NULL filename is used instead
* @param filename the name of the filename to use for allocating the
* context, may be NULL
* @return >= 0 in case of success, a negative AVERROR code in case of
* failure
*/
int avformat_alloc_output_context2(AVFormatContext **ctx, AVOutputFormat *oformat,
const char *format_name, const char *filename);
/**
* Read packets of a media file to get stream information. This
* is useful for file formats with no headers such as MPEG. This
* function also computes the real framerate in case of MPEG-2 repeat
* frame mode.
* The logical file position is not changed by this function;
* examined packets may be buffered for later processing.
*
* @param ic media file handle
* @return >=0 if OK, AVERROR_xxx on error
* @todo Let the user decide somehow what information is needed so that
* we do not waste time getting stuff the user does not need.
*/
int av_find_stream_info(AVFormatContext *ic);
/**
* Find the "best" stream in the file.
* The best stream is determined according to various heuristics as the most
* likely to be what the user expects.
* If the decoder parameter is non-NULL, av_find_best_stream will find the
* default decoder for the stream's codec; streams for which no decoder can
* be found are ignored.
*
* @param ic media file handle
* @param type stream type: video, audio, subtitles, etc.
* @param wanted_stream_nb user-requested stream number,
* or -1 for automatic selection
* @param related_stream try to find a stream related (eg. in the same
* program) to this one, or -1 if none
* @param decoder_ret if non-NULL, returns the decoder for the
* selected stream
* @param flags flags; none are currently defined
* @return the non-negative stream number in case of success,
* AVERROR_STREAM_NOT_FOUND if no stream with the requested type
* could be found,
* AVERROR_DECODER_NOT_FOUND if streams were found but no decoder
* @note If av_find_best_stream returns successfully and decoder_ret is not
* NULL, then *decoder_ret is guaranteed to be set to a valid AVCodec.
*/
int av_find_best_stream(AVFormatContext *ic,
enum AVMediaType type,
int wanted_stream_nb,
int related_stream,
AVCodec **decoder_ret,
int flags);
/**
* Read a transport packet from a media file.
*
* This function is obsolete and should never be used.
* Use av_read_frame() instead.
*
* @param s media file handle
* @param pkt is filled
* @return 0 if OK, AVERROR_xxx on error
*/
int av_read_packet(AVFormatContext *s, AVPacket *pkt);
/**
* Return the next frame of a stream.
* This function returns what is stored in the file, and does not validate
* that what is there are valid frames for the decoder. It will split what is
* stored in the file into frames and return one for each call. It will not
* omit invalid data between valid frames so as to give the decoder the maximum
* information possible for decoding.
*
* The returned packet is valid
* until the next av_read_frame() or until av_close_input_file() and
* must be freed with av_free_packet. For video, the packet contains
* exactly one frame. For audio, it contains an integer number of
* frames if each frame has a known fixed size (e.g. PCM or ADPCM
* data). If the audio frames have a variable size (e.g. MPEG audio),
* then it contains one frame.
*
* pkt->pts, pkt->dts and pkt->duration are always set to correct
* values in AVStream.time_base units (and guessed if the format cannot
* provide them). pkt->pts can be AV_NOPTS_VALUE if the video format
* has B-frames, so it is better to rely on pkt->dts if you do not
* decompress the payload.
*
* @return 0 if OK, < 0 on error or end of file
*/
int av_read_frame(AVFormatContext *s, AVPacket *pkt);
/**
* Seek to the keyframe at timestamp.
* 'timestamp' in 'stream_index'.
* @param stream_index If stream_index is (-1), a default
* stream is selected, and timestamp is automatically converted
* from AV_TIME_BASE units to the stream specific time_base.
* @param timestamp Timestamp in AVStream.time_base units
* or, if no stream is specified, in AV_TIME_BASE units.
* @param flags flags which select direction and seeking mode
* @return >= 0 on success
*/
int av_seek_frame(AVFormatContext *s, int stream_index, int64_t timestamp,
int flags);
/**
* Seek to timestamp ts.
* Seeking will be done so that the point from which all active streams
* can be presented successfully will be closest to ts and within min/max_ts.
* Active streams are all streams that have AVStream.discard < AVDISCARD_ALL.
*
* If flags contain AVSEEK_FLAG_BYTE, then all timestamps are in bytes and
* are the file position (this may not be supported by all demuxers).
* If flags contain AVSEEK_FLAG_FRAME, then all timestamps are in frames
* in the stream with stream_index (this may not be supported by all demuxers).
* Otherwise all timestamps are in units of the stream selected by stream_index
* or if stream_index is -1, in AV_TIME_BASE units.
* If flags contain AVSEEK_FLAG_ANY, then non-keyframes are treated as
* keyframes (this may not be supported by all demuxers).
*
* @param stream_index index of the stream which is used as time base reference
* @param min_ts smallest acceptable timestamp
* @param ts target timestamp
* @param max_ts largest acceptable timestamp
* @param flags flags
* @return >=0 on success, error code otherwise
*
* @note This is part of the new seek API which is still under construction.
* Thus do not use this yet. It may change at any time, do not expect
* ABI compatibility yet!
*/
int avformat_seek_file(AVFormatContext *s, int stream_index, int64_t min_ts, int64_t ts, int64_t max_ts, int flags);
/**
* Start playing a network-based stream (e.g. RTSP stream) at the
* current position.
*/
int av_read_play(AVFormatContext *s);
/**
* Pause a network-based stream (e.g. RTSP stream).
*
* Use av_read_play() to resume it.
*/
int av_read_pause(AVFormatContext *s);
/**
 * Free an AVFormatContext allocated by av_open_input_stream.
* @param s context to free
*/
void av_close_input_stream(AVFormatContext *s);
/**
* Close a media file (but not its codecs).
*
* @param s media file handle
*/
void av_close_input_file(AVFormatContext *s);
/**
* Free an AVFormatContext and all its streams.
* @param s context to free
*/
void avformat_free_context(AVFormatContext *s);
/**
* Add a new stream to a media file.
*
* Can only be called in the read_header() function. If the flag
* AVFMTCTX_NOHEADER is in the format context, then new streams
* can be added in read_packet too.
*
* @param s media file handle
* @param id file-format-dependent stream ID
*/
AVStream *av_new_stream(AVFormatContext *s, int id);
AVProgram *av_new_program(AVFormatContext *s, int id);
/**
* Set the pts for a given stream. If the new values would be invalid
* (<= 0), it leaves the AVStream unchanged.
*
* @param s stream
* @param pts_wrap_bits number of bits effectively used by the pts
* (used for wrap control, 33 is the value for MPEG)
* @param pts_num numerator to convert to seconds (MPEG: 1)
* @param pts_den denominator to convert to seconds (MPEG: 90000)
*/
void av_set_pts_info(AVStream *s, int pts_wrap_bits,
unsigned int pts_num, unsigned int pts_den);
#define AVSEEK_FLAG_BACKWARD 1 ///< seek backward
#define AVSEEK_FLAG_BYTE 2 ///< seeking based on position in bytes
#define AVSEEK_FLAG_ANY 4 ///< seek to any frame, even non-keyframes
#define AVSEEK_FLAG_FRAME 8 ///< seeking based on frame number
int av_find_default_stream_index(AVFormatContext *s);
/**
* Get the index for a specific timestamp.
* @param flags if AVSEEK_FLAG_BACKWARD then the returned index will correspond
* to the timestamp which is <= the requested one, if backward
* is 0, then it will be >=
* if AVSEEK_FLAG_ANY seek to any frame, only keyframes otherwise
* @return < 0 if no such timestamp could be found
*/
int av_index_search_timestamp(AVStream *st, int64_t timestamp, int flags);
/**
* Add an index entry into a sorted list. Update the entry if the list
* already contains it.
*
* @param timestamp timestamp in the time base of the given stream
*/
int av_add_index_entry(AVStream *st, int64_t pos, int64_t timestamp,
int size, int distance, int flags);
/**
* Perform a binary search using av_index_search_timestamp() and
* AVInputFormat.read_timestamp().
* This is not supposed to be called directly by a user application,
* but by demuxers.
* @param target_ts target timestamp in the time base of the given stream
* @param stream_index stream number
*/
int av_seek_frame_binary(AVFormatContext *s, int stream_index,
int64_t target_ts, int flags);
/**
* Update cur_dts of all streams based on the given timestamp and AVStream.
*
* Stream ref_st unchanged, others set cur_dts in their native time base.
* Only needed for timestamp wrapping or if (dts not set and pts!=dts).
* @param timestamp new dts expressed in time_base of param ref_st
* @param ref_st reference stream giving time_base of param timestamp
*/
void av_update_cur_dts(AVFormatContext *s, AVStream *ref_st, int64_t timestamp);
/**
* Perform a binary search using read_timestamp().
* This is not supposed to be called directly by a user application,
* but by demuxers.
* @param target_ts target timestamp in the time base of the given stream
* @param stream_index stream number
*/
int64_t av_gen_search(AVFormatContext *s, int stream_index,
int64_t target_ts, int64_t pos_min,
int64_t pos_max, int64_t pos_limit,
int64_t ts_min, int64_t ts_max,
int flags, int64_t *ts_ret,
int64_t (*read_timestamp)(struct AVFormatContext *, int , int64_t *, int64_t ));
/**
* media file output
*/
#if FF_API_FORMAT_PARAMETERS
/**
* @deprecated pass the options to avformat_write_header directly.
*/
attribute_deprecated int av_set_parameters(AVFormatContext *s, AVFormatParameters *ap);
#endif
/**
* Split a URL string into components.
*
* The pointers to buffers for storing individual components may be null,
* in order to ignore that component. Buffers for components not found are
* set to empty strings. If the port is not found, it is set to a negative
* value.
*
* @param proto the buffer for the protocol
* @param proto_size the size of the proto buffer
* @param authorization the buffer for the authorization
* @param authorization_size the size of the authorization buffer
* @param hostname the buffer for the host name
* @param hostname_size the size of the hostname buffer
* @param port_ptr a pointer to store the port number in
* @param path the buffer for the path
* @param path_size the size of the path buffer
* @param url the URL to split
*/
void av_url_split(char *proto, int proto_size,
char *authorization, int authorization_size,
char *hostname, int hostname_size,
int *port_ptr,
char *path, int path_size,
const char *url);
/**
* Allocate the stream private data and write the stream header to
* an output media file.
*
* @param s Media file handle, must be allocated with avformat_alloc_context().
* Its oformat field must be set to the desired output format;
 * Its pb field must be set to an already opened AVIOContext.
* @param options An AVDictionary filled with AVFormatContext and muxer-private options.
* On return this parameter will be destroyed and replaced with a dict containing
* options that were not found. May be NULL.
*
* @return 0 on success, negative AVERROR on failure.
*
* @see av_opt_find, av_dict_set, avio_open, av_oformat_next.
*/
int avformat_write_header(AVFormatContext *s, AVDictionary **options);
#if FF_API_FORMAT_PARAMETERS
/**
* Allocate the stream private data and write the stream header to an
* output media file.
* @note: this sets stream time-bases, if possible to stream->codec->time_base
* but for some formats it might also be some other time base
*
* @param s media file handle
* @return 0 if OK, AVERROR_xxx on error
*
* @deprecated use avformat_write_header.
*/
attribute_deprecated int av_write_header(AVFormatContext *s);
#endif
/**
* Write a packet to an output media file.
*
* The packet shall contain one audio or video frame.
* The packet must be correctly interleaved according to the container
* specification, if not then av_interleaved_write_frame must be used.
*
* @param s media file handle
* @param pkt The packet, which contains the stream_index, buf/buf_size,
dts/pts, ...
* @return < 0 on error, = 0 if OK, 1 if end of stream wanted
*/
int av_write_frame(AVFormatContext *s, AVPacket *pkt);
/**
* Write a packet to an output media file ensuring correct interleaving.
*
* The packet must contain one audio or video frame.
* If the packets are already correctly interleaved, the application should
* call av_write_frame() instead as it is slightly faster. It is also important
* to keep in mind that completely non-interleaved input will need huge amounts
* of memory to interleave with this, so it is preferable to interleave at the
* demuxer level.
*
* @param s media file handle
* @param pkt The packet, which contains the stream_index, buf/buf_size,
dts/pts, ...
* @return < 0 on error, = 0 if OK, 1 if end of stream wanted
*/
int av_interleaved_write_frame(AVFormatContext *s, AVPacket *pkt);
/**
* Interleave a packet per dts in an output media file.
*
* Packets with pkt->destruct == av_destruct_packet will be freed inside this
* function, so they cannot be used after it. Note that calling av_free_packet()
* on them is still safe.
*
* @param s media file handle
* @param out the interleaved packet will be output here
* @param pkt the input packet
* @param flush 1 if no further packets are available as input and all
* remaining packets should be output
* @return 1 if a packet was output, 0 if no packet could be output,
* < 0 if an error occurred
*/
int av_interleave_packet_per_dts(AVFormatContext *s, AVPacket *out,
AVPacket *pkt, int flush);
/**
* Write the stream trailer to an output media file and free the
* file private data.
*
* May only be called after a successful call to av_write_header.
*
* @param s media file handle
* @return 0 if OK, AVERROR_xxx on error
*/
int av_write_trailer(AVFormatContext *s);
#if FF_API_DUMP_FORMAT
/**
* @deprecated Deprecated in favor of av_dump_format().
*/
attribute_deprecated void dump_format(AVFormatContext *ic,
int index,
const char *url,
int is_output);
#endif
void av_dump_format(AVFormatContext *ic,
int index,
const char *url,
int is_output);
#if FF_API_PARSE_DATE
/**
* Parse datestr and return a corresponding number of microseconds.
*
* @param datestr String representing a date or a duration.
* See av_parse_time() for the syntax of the provided string.
* @deprecated in favor of av_parse_time()
*/
attribute_deprecated
int64_t parse_date(const char *datestr, int duration);
#endif
/**
* Get the current time in microseconds.
*/
int64_t av_gettime(void);
#if FF_API_FIND_INFO_TAG
/**
* @deprecated use av_find_info_tag in libavutil instead.
*/
attribute_deprecated int find_info_tag(char *arg, int arg_size, const char *tag1, const char *info);
#endif
/**
* Return in 'buf' the path with '%d' replaced by a number.
*
* Also handles the '%0nd' format where 'n' is the total number
* of digits and '%%'.
*
* @param buf destination buffer
* @param buf_size destination buffer size
* @param path numbered sequence string
* @param number frame number
* @return 0 if OK, -1 on format error
*/
int av_get_frame_filename(char *buf, int buf_size,
const char *path, int number);
/**
* Check whether filename actually is a numbered sequence generator.
*
* @param filename possible numbered sequence string
* @return 1 if a valid numbered sequence string, 0 otherwise
*/
int av_filename_number_test(const char *filename);
/**
* Generate an SDP for an RTP session.
*
* @param ac array of AVFormatContexts describing the RTP streams. If the
* array is composed by only one context, such context can contain
* multiple AVStreams (one AVStream per RTP stream). Otherwise,
* all the contexts in the array (an AVCodecContext per RTP stream)
* must contain only one AVStream.
 * @param n_files number of AVFormatContexts contained in ac
* @param buf buffer where the SDP will be stored (must be allocated by
* the caller)
* @param size the size of the buffer
* @return 0 if OK, AVERROR_xxx on error
*/
int av_sdp_create(AVFormatContext *ac[], int n_files, char *buf, int size);
#if FF_API_SDP_CREATE
attribute_deprecated int avf_sdp_create(AVFormatContext *ac[], int n_files, char *buff, int size);
#endif
/**
* Return a positive value if the given filename has one of the given
* extensions, 0 otherwise.
*
* @param extensions a comma-separated list of filename extensions
*/
int av_match_ext(const char *filename, const char *extensions);
#endif /* AVFORMAT_AVFORMAT_H */
/*
* copyright (c) 2001 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_AVIO_H
#define AVFORMAT_AVIO_H
/**
* @file
* Buffered I/O operations
*/
#include <stdint.h>
#include "libavutil/common.h"
#include "libavutil/log.h"
#include "libavformat/version.h"
#define AVIO_SEEKABLE_NORMAL 0x0001 /**< Seeking works like for a local file */
/**
* Bytestream IO Context.
* New fields can be added to the end with minor version bumps.
* Removal, reordering and changes to existing fields require a major
* version bump.
* sizeof(AVIOContext) must not be used outside libav*.
*
* @note None of the function pointers in AVIOContext should be called
* directly, they should only be set by the client application
* when implementing custom I/O. Normally these are set to the
* function pointers specified in avio_alloc_context()
*/
typedef struct {
unsigned char *buffer; /**< Start of the buffer. */
int buffer_size; /**< Maximum buffer size */
unsigned char *buf_ptr; /**< Current position in the buffer */
unsigned char *buf_end; /**< End of the data, may be less than
buffer+buffer_size if the read function returned
less data than requested, e.g. for streams where
no more data has been received yet. */
void *opaque; /**< A private pointer, passed to the read/write/seek/...
functions. */
/** Read callback; normally set via avio_alloc_context(). */
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size);
/** Write callback; normally set via avio_alloc_context(). */
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size);
/** Seek callback; normally set via avio_alloc_context(). */
int64_t (*seek)(void *opaque, int64_t offset, int whence);
int64_t pos; /**< position in the file of the current buffer */
int must_flush; /**< true if the next seek should flush */
int eof_reached; /**< true if eof reached */
int write_flag; /**< true if open for writing */
#if FF_API_OLD_AVIO
attribute_deprecated int is_streamed; /**< @deprecated use the seekable field instead */
#endif
int max_packet_size; /**< NOTE(review): presumably the max packet size for packetized output, mirroring URLContext.max_packet_size — confirm */
unsigned long checksum; /**< Running checksum state; see init_checksum()/get_checksum(). */
unsigned char *checksum_ptr; /**< NOTE(review): presumably the buffer position up to which the checksum was computed — confirm */
/** Checksum update function installed by init_checksum(). */
unsigned long (*update_checksum)(unsigned long checksum, const uint8_t *buf, unsigned int size);
int error; /**< contains the error code or 0 if no error happened */
/**
 * Pause or resume playback for network streaming protocols - e.g. MMS.
 */
int (*read_pause)(void *opaque, int pause);
/**
 * Seek to a given timestamp in stream with the specified stream_index.
 * Needed for some network streaming protocols which don't support seeking
 * to byte position.
 */
int64_t (*read_seek)(void *opaque, int stream_index,
int64_t timestamp, int flags);
/**
 * A combination of AVIO_SEEKABLE_ flags or 0 when the stream is not seekable.
 */
int seekable;
} AVIOContext;
/* unbuffered I/O */
#if FF_API_OLD_AVIO
/**
* URL Context.
* New fields can be added to the end with minor version bumps.
* Removal, reordering and changes to existing fields require a major
* version bump.
* sizeof(URLContext) must not be used outside libav*.
* @deprecated This struct will be made private
*/
typedef struct URLContext {
const AVClass *av_class; ///< information for av_log(). Set by url_open().
struct URLProtocol *prot; ///< Protocol implementation backing this context.
int flags; ///< Open flags (URL_RDONLY/URL_WRONLY/URL_RDWR, optionally URL_FLAG_NONBLOCK).
int is_streamed; /**< true if streamed (no seek possible), default = false */
int max_packet_size; /**< if non zero, the stream is packetized with this max packet size */
void *priv_data; ///< Protocol-private state (presumably priv_data_size bytes, see URLProtocol — confirm).
char *filename; /**< specified URL */
int is_connected; ///< NOTE(review): presumably nonzero once the connect step has completed — confirm against url_connect().
} URLContext;
#define URL_PROTOCOL_FLAG_NESTED_SCHEME 1 /*< The protocol name can be the first part of a nested protocol scheme */
/**
* @deprecated This struct is to be made private. Use the higher-level
* AVIOContext-based API instead.
*/
typedef struct URLProtocol {
const char *name; ///< Protocol name (may serve as the first part of a nested scheme, see URL_PROTOCOL_FLAG_NESTED_SCHEME).
int (*url_open)(URLContext *h, const char *url, int flags);
int (*url_read)(URLContext *h, unsigned char *buf, int size);
int (*url_write)(URLContext *h, const unsigned char *buf, int size);
int64_t (*url_seek)(URLContext *h, int64_t pos, int whence);
int (*url_close)(URLContext *h);
struct URLProtocol *next; ///< Next protocol in the registration list (traversed by av_protocol_next()).
/** Pause or resume playback for network streaming protocols - e.g. MMS. */
int (*url_read_pause)(URLContext *h, int pause);
/** Timestamp-based seek for protocols that cannot seek to a byte position. */
int64_t (*url_read_seek)(URLContext *h, int stream_index,
int64_t timestamp, int flags);
int (*url_get_file_handle)(URLContext *h);
int priv_data_size; ///< NOTE(review): presumably the size allocated for URLContext.priv_data — confirm.
const AVClass *priv_data_class;
int flags; ///< Combination of URL_PROTOCOL_FLAG_* values.
int (*url_check)(URLContext *h, int mask);
} URLProtocol;
/**
 * Poll descriptor for url_poll() (which is declared but not implemented).
 * Field names presumably mirror POSIX struct pollfd — confirm if ever implemented.
 */
typedef struct URLPollEntry {
URLContext *handle; ///< Context to poll.
int events; ///< Requested events.
int revents; ///< Returned events.
} URLPollEntry;
/* not implemented */
attribute_deprecated int url_poll(URLPollEntry *poll_table, int n, int timeout);
/**
* @defgroup open_modes URL open modes
 * The flags argument to url_open and cousins must be one of the following
* constants, optionally ORed with other flags.
* @{
*/
#define URL_RDONLY 1 /**< read-only */
#define URL_WRONLY 2 /**< write-only */
#define URL_RDWR (URL_RDONLY|URL_WRONLY) /**< read-write */
/**
* @}
*/
/**
* Use non-blocking mode.
* If this flag is set, operations on the context will return
* AVERROR(EAGAIN) if they can not be performed immediately.
* If this flag is not set, operations on the context will never return
* AVERROR(EAGAIN).
* Note that this flag does not affect the opening/connecting of the
* context. Connecting a protocol will always block if necessary (e.g. on
* network protocols) but never hang (e.g. on busy devices).
* Warning: non-blocking protocols is work-in-progress; this flag may be
* silently ignored.
*/
#define URL_FLAG_NONBLOCK 4
/** Callback type used to interrupt blocking I/O operations. */
typedef int URLInterruptCB(void);
/** Global interrupt callback; installed via url_set_interrupt_cb() (deprecated). */
extern URLInterruptCB *url_interrupt_cb;
/**
* @defgroup old_url_funcs Old url_* functions
* @deprecated use the buffered API based on AVIOContext instead
* @{
*/
attribute_deprecated int url_open_protocol (URLContext **puc, struct URLProtocol *up,
const char *url, int flags);
attribute_deprecated int url_alloc(URLContext **h, const char *url, int flags);
attribute_deprecated int url_connect(URLContext *h);
attribute_deprecated int url_open(URLContext **h, const char *url, int flags);
attribute_deprecated int url_read(URLContext *h, unsigned char *buf, int size);
attribute_deprecated int url_read_complete(URLContext *h, unsigned char *buf, int size);
attribute_deprecated int url_write(URLContext *h, const unsigned char *buf, int size);
attribute_deprecated int64_t url_seek(URLContext *h, int64_t pos, int whence);
attribute_deprecated int url_close(URLContext *h);
attribute_deprecated int64_t url_filesize(URLContext *h);
attribute_deprecated int url_get_file_handle(URLContext *h);
attribute_deprecated int url_get_max_packet_size(URLContext *h);
attribute_deprecated void url_get_filename(URLContext *h, char *buf, int buf_size);
attribute_deprecated int av_url_read_pause(URLContext *h, int pause);
attribute_deprecated int64_t av_url_read_seek(URLContext *h, int stream_index,
int64_t timestamp, int flags);
attribute_deprecated void url_set_interrupt_cb(int (*interrupt_cb)(void));
/**
* returns the next registered protocol after the given protocol (the first if
* NULL is given), or NULL if protocol is the last one.
*/
URLProtocol *av_protocol_next(URLProtocol *p);
/**
* Register the URLProtocol protocol.
*
* @param size the size of the URLProtocol struct referenced
*/
attribute_deprecated int av_register_protocol2(URLProtocol *protocol, int size);
/**
* @}
*/
typedef attribute_deprecated AVIOContext ByteIOContext;
attribute_deprecated int init_put_byte(AVIOContext *s,
unsigned char *buffer,
int buffer_size,
int write_flag,
void *opaque,
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size),
int64_t (*seek)(void *opaque, int64_t offset, int whence));
attribute_deprecated AVIOContext *av_alloc_put_byte(
unsigned char *buffer,
int buffer_size,
int write_flag,
void *opaque,
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size),
int64_t (*seek)(void *opaque, int64_t offset, int whence));
/**
* @defgroup old_avio_funcs Old put_/get_*() functions
* @deprecated use the avio_ -prefixed functions instead.
* @{
*/
attribute_deprecated int get_buffer(AVIOContext *s, unsigned char *buf, int size);
attribute_deprecated int get_partial_buffer(AVIOContext *s, unsigned char *buf, int size);
attribute_deprecated int get_byte(AVIOContext *s);
attribute_deprecated unsigned int get_le16(AVIOContext *s);
attribute_deprecated unsigned int get_le24(AVIOContext *s);
attribute_deprecated unsigned int get_le32(AVIOContext *s);
attribute_deprecated uint64_t get_le64(AVIOContext *s);
attribute_deprecated unsigned int get_be16(AVIOContext *s);
attribute_deprecated unsigned int get_be24(AVIOContext *s);
attribute_deprecated unsigned int get_be32(AVIOContext *s);
attribute_deprecated uint64_t get_be64(AVIOContext *s);
attribute_deprecated void put_byte(AVIOContext *s, int b);
attribute_deprecated void put_nbyte(AVIOContext *s, int b, int count);
attribute_deprecated void put_buffer(AVIOContext *s, const unsigned char *buf, int size);
attribute_deprecated void put_le64(AVIOContext *s, uint64_t val);
attribute_deprecated void put_be64(AVIOContext *s, uint64_t val);
attribute_deprecated void put_le32(AVIOContext *s, unsigned int val);
attribute_deprecated void put_be32(AVIOContext *s, unsigned int val);
attribute_deprecated void put_le24(AVIOContext *s, unsigned int val);
attribute_deprecated void put_be24(AVIOContext *s, unsigned int val);
attribute_deprecated void put_le16(AVIOContext *s, unsigned int val);
attribute_deprecated void put_be16(AVIOContext *s, unsigned int val);
attribute_deprecated void put_tag(AVIOContext *s, const char *tag);
/**
* @}
*/
attribute_deprecated int av_url_read_fpause(AVIOContext *h, int pause);
attribute_deprecated int64_t av_url_read_fseek (AVIOContext *h, int stream_index,
int64_t timestamp, int flags);
/**
* @defgroup old_url_f_funcs Old url_f* functions
* @deprecated use the avio_ -prefixed functions instead.
* @{
*/
attribute_deprecated int url_fopen( AVIOContext **s, const char *url, int flags);
attribute_deprecated int url_fclose(AVIOContext *s);
attribute_deprecated int64_t url_fseek(AVIOContext *s, int64_t offset, int whence);
attribute_deprecated int url_fskip(AVIOContext *s, int64_t offset);
attribute_deprecated int64_t url_ftell(AVIOContext *s);
attribute_deprecated int64_t url_fsize(AVIOContext *s);
#define URL_EOF (-1)
attribute_deprecated int url_fgetc(AVIOContext *s);
attribute_deprecated int url_setbufsize(AVIOContext *s, int buf_size);
#ifdef __GNUC__
attribute_deprecated int url_fprintf(AVIOContext *s, const char *fmt, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
attribute_deprecated int url_fprintf(AVIOContext *s, const char *fmt, ...);
#endif
attribute_deprecated void put_flush_packet(AVIOContext *s);
attribute_deprecated int url_open_dyn_buf(AVIOContext **s);
attribute_deprecated int url_open_dyn_packet_buf(AVIOContext **s, int max_packet_size);
attribute_deprecated int url_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer);
attribute_deprecated int url_fdopen(AVIOContext **s, URLContext *h);
/**
* @}
*/
attribute_deprecated int url_ferror(AVIOContext *s);
attribute_deprecated int udp_set_remote_url(URLContext *h, const char *uri);
attribute_deprecated int udp_get_local_port(URLContext *h);
attribute_deprecated void init_checksum(AVIOContext *s,
unsigned long (*update_checksum)(unsigned long c, const uint8_t *p, unsigned int len),
unsigned long checksum);
attribute_deprecated unsigned long get_checksum(AVIOContext *s);
attribute_deprecated void put_strz(AVIOContext *s, const char *buf);
/** @note unlike fgets, the EOL character is not returned and a whole
line is parsed. return NULL if first char read was EOF */
attribute_deprecated char *url_fgets(AVIOContext *s, char *buf, int buf_size);
/**
* @deprecated use avio_get_str instead
*/
attribute_deprecated char *get_strz(AVIOContext *s, char *buf, int maxlen);
/**
* @deprecated Use AVIOContext.seekable field directly.
*/
attribute_deprecated static inline int url_is_streamed(AVIOContext *s)
{
    /* A context is "streamed" (non-seekable) exactly when its seekable
     * field is zero. Kept only for source compatibility; new code should
     * read AVIOContext.seekable directly. */
    return s->seekable == 0;
}
attribute_deprecated URLContext *url_fileno(AVIOContext *s);
/**
* @deprecated use AVIOContext.max_packet_size directly.
*/
attribute_deprecated int url_fget_max_packet_size(AVIOContext *s);
attribute_deprecated int url_open_buf(AVIOContext **s, uint8_t *buf, int buf_size, int flags);
/** return the written or read size */
attribute_deprecated int url_close_buf(AVIOContext *s);
/**
* Return a non-zero value if the resource indicated by url
* exists, 0 otherwise.
* @deprecated Use avio_check instead.
*/
attribute_deprecated int url_exist(const char *url);
#endif // FF_API_OLD_AVIO
/**
* Return AVIO_FLAG_* access flags corresponding to the access permissions
* of the resource in url, or a negative value corresponding to an
* AVERROR code in case of failure. The returned access flags are
* masked by the value in flags.
*
* @note This function is intrinsically unsafe, in the sense that the
* checked resource may change its existence or permission status from
* one call to another. Thus you should not trust the returned value,
* unless you are sure that no other processes are accessing the
* checked resource.
*/
int avio_check(const char *url, int flags);
/**
* The callback is called in blocking functions to test regularly if
* asynchronous interruption is needed. AVERROR_EXIT is returned
* in this case by the interrupted function. 'NULL' means no interrupt
* callback is given.
*/
void avio_set_interrupt_cb(int (*interrupt_cb)(void));
/**
* Allocate and initialize an AVIOContext for buffered I/O. It must be later
* freed with av_free().
*
* @param buffer Memory block for input/output operations via AVIOContext.
* The buffer must be allocated with av_malloc() and friends.
* @param buffer_size The buffer size is very important for performance.
* For protocols with fixed blocksize it should be set to this blocksize.
* For others a typical size is a cache page, e.g. 4kb.
* @param write_flag Set to 1 if the buffer should be writable, 0 otherwise.
* @param opaque An opaque pointer to user-specific data.
* @param read_packet A function for refilling the buffer, may be NULL.
* @param write_packet A function for writing the buffer contents, may be NULL.
* @param seek A function for seeking to specified byte position, may be NULL.
*
* @return Allocated AVIOContext or NULL on failure.
*/
AVIOContext *avio_alloc_context(
unsigned char *buffer,
int buffer_size,
int write_flag,
void *opaque,
int (*read_packet)(void *opaque, uint8_t *buf, int buf_size),
int (*write_packet)(void *opaque, uint8_t *buf, int buf_size),
int64_t (*seek)(void *opaque, int64_t offset, int whence));
void avio_w8(AVIOContext *s, int b);
void avio_write(AVIOContext *s, const unsigned char *buf, int size);
void avio_wl64(AVIOContext *s, uint64_t val);
void avio_wb64(AVIOContext *s, uint64_t val);
void avio_wl32(AVIOContext *s, unsigned int val);
void avio_wb32(AVIOContext *s, unsigned int val);
void avio_wl24(AVIOContext *s, unsigned int val);
void avio_wb24(AVIOContext *s, unsigned int val);
void avio_wl16(AVIOContext *s, unsigned int val);
void avio_wb16(AVIOContext *s, unsigned int val);
/**
* Write a NULL-terminated string.
* @return number of bytes written.
*/
int avio_put_str(AVIOContext *s, const char *str);
/**
* Convert a UTF-8 string to UTF-16LE and write it.
* @return number of bytes written.
*/
int avio_put_str16le(AVIOContext *s, const char *str);
/**
* Passing this as the "whence" parameter to a seek function causes it to
* return the filesize without seeking anywhere. Supporting this is optional.
* If it is not supported then the seek function will return <0.
*/
#define AVSEEK_SIZE 0x10000
/**
* ORing this flag into the "whence" parameter to a seek function causes it to
* seek by any means (like reopening and linear reading) or other normally unreasonable
* means that can be extremely slow.
* This may be ignored by the seek code.
*/
#define AVSEEK_FORCE 0x20000
/**
* fseek() equivalent for AVIOContext.
* @return new position or AVERROR.
*/
int64_t avio_seek(AVIOContext *s, int64_t offset, int whence);
/**
* Skip given number of bytes forward
* @return new position or AVERROR.
*/
int64_t avio_skip(AVIOContext *s, int64_t offset);
/**
* ftell() equivalent for AVIOContext.
* @return position or AVERROR.
*/
static av_always_inline int64_t avio_tell(AVIOContext *s)
{
    /* ftell() equivalent: the current position is obtained by performing
     * a relative seek of zero bytes. Returns the position or an AVERROR
     * value propagated from avio_seek(). */
    int64_t pos = avio_seek(s, 0, SEEK_CUR);
    return pos;
}
/**
* Get the filesize.
* @return filesize or AVERROR
*/
int64_t avio_size(AVIOContext *s);
/**
* feof() equivalent for AVIOContext.
* @return non zero if and only if end of file
*/
int url_feof(AVIOContext *s);
/** @warning currently size is limited */
#ifdef __GNUC__
int avio_printf(AVIOContext *s, const char *fmt, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
int avio_printf(AVIOContext *s, const char *fmt, ...);
#endif
void avio_flush(AVIOContext *s);
/**
* Read size bytes from AVIOContext into buf.
* @return number of bytes read or AVERROR
*/
int avio_read(AVIOContext *s, unsigned char *buf, int size);
/**
* @defgroup avio_read Functions for reading from AVIOContext.
* @{
*
* @note return 0 if EOF, so you cannot use it if EOF handling is
* necessary
*/
int avio_r8 (AVIOContext *s);
unsigned int avio_rl16(AVIOContext *s);
unsigned int avio_rl24(AVIOContext *s);
unsigned int avio_rl32(AVIOContext *s);
uint64_t avio_rl64(AVIOContext *s);
unsigned int avio_rb16(AVIOContext *s);
unsigned int avio_rb24(AVIOContext *s);
unsigned int avio_rb32(AVIOContext *s);
uint64_t avio_rb64(AVIOContext *s);
/**
* @}
*/
/**
* Read a string from pb into buf. The reading will terminate when either
* a NULL character was encountered, maxlen bytes have been read, or nothing
* more can be read from pb. The result is guaranteed to be NULL-terminated, it
* will be truncated if buf is too small.
* Note that the string is not interpreted or validated in any way, it
* might get truncated in the middle of a sequence for multi-byte encodings.
*
* @return number of bytes read (is always <= maxlen).
* If reading ends on EOF or error, the return value will be one more than
* bytes actually read.
*/
int avio_get_str(AVIOContext *pb, int maxlen, char *buf, int buflen);
/**
* Read a UTF-16 string from pb and convert it to UTF-8.
* The reading will terminate when either a null or invalid character was
* encountered or maxlen bytes have been read.
* @return number of bytes read (is always <= maxlen)
*/
int avio_get_str16le(AVIOContext *pb, int maxlen, char *buf, int buflen);
int avio_get_str16be(AVIOContext *pb, int maxlen, char *buf, int buflen);
/**
* @defgroup open_modes URL open modes
* The flags argument to avio_open must be one of the following
* constants, optionally ORed with other flags.
* @{
*/
#define AVIO_FLAG_READ 1 /**< read-only */
#define AVIO_FLAG_WRITE 2 /**< write-only */
#define AVIO_FLAG_READ_WRITE (AVIO_FLAG_READ|AVIO_FLAG_WRITE) /**< read-write pseudo flag */
/**
* @}
*/
/**
* Use non-blocking mode.
* If this flag is set, operations on the context will return
* AVERROR(EAGAIN) if they can not be performed immediately.
* If this flag is not set, operations on the context will never return
* AVERROR(EAGAIN).
* Note that this flag does not affect the opening/connecting of the
* context. Connecting a protocol will always block if necessary (e.g. on
* network protocols) but never hang (e.g. on busy devices).
* Warning: non-blocking protocols is work-in-progress; this flag may be
* silently ignored.
*/
#define AVIO_FLAG_NONBLOCK 8
/**
* Create and initialize a AVIOContext for accessing the
* resource indicated by url.
* @note When the resource indicated by url has been opened in
* read+write mode, the AVIOContext can be used only for writing.
*
* @param s Used to return the pointer to the created AVIOContext.
* In case of failure the pointed to value is set to NULL.
* @param flags flags which control how the resource indicated by url
* is to be opened
* @return 0 in case of success, a negative value corresponding to an
* AVERROR code in case of failure
*/
int avio_open(AVIOContext **s, const char *url, int flags);
/**
* Close the resource accessed by the AVIOContext s and free it.
* This function can only be used if s was opened by avio_open().
*
* @return 0 on success, an AVERROR < 0 on error.
*/
int avio_close(AVIOContext *s);
/**
* Open a write only memory stream.
*
* @param s new IO context
* @return zero if no error.
*/
int avio_open_dyn_buf(AVIOContext **s);
/**
* Return the written size and a pointer to the buffer. The buffer
* must be freed with av_free().
* Padding of FF_INPUT_BUFFER_PADDING_SIZE is added to the buffer.
*
* @param s IO context
* @param pbuffer pointer to a byte buffer
* @return the length of the byte buffer
*/
int avio_close_dyn_buf(AVIOContext *s, uint8_t **pbuffer);
/**
* Iterate through names of available protocols.
* @note it is recommended to use av_protocol_next() instead of this
*
* @param opaque A private pointer representing current protocol.
* It must be a pointer to NULL on first iteration and will
* be updated by successive calls to avio_enum_protocols.
* @param output If set to 1, iterate over output protocols,
* otherwise over input protocols.
*
* @return A static string containing the name of current protocol or NULL
*/
const char *avio_enum_protocols(void **opaque, int output);
/**
* Pause and resume playing - only meaningful if using a network streaming
* protocol (e.g. MMS).
* @param pause 1 for pause, 0 for resume
*/
int avio_pause(AVIOContext *h, int pause);
/**
* Seek to a given timestamp relative to some component stream.
* Only meaningful if using a network streaming protocol (e.g. MMS.).
* @param stream_index The stream index that the timestamp is relative to.
* If stream_index is (-1) the timestamp should be in AV_TIME_BASE
* units from the beginning of the presentation.
* If a stream_index >= 0 is used and the protocol does not support
* seeking based on component streams, the call will fail with ENOTSUP.
* @param timestamp timestamp in AVStream.time_base units
* or if there is no stream specified then in AV_TIME_BASE units.
* @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE
* and AVSEEK_FLAG_ANY. The protocol may silently ignore
* AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will
* fail with ENOTSUP if used and not supported.
* @return >= 0 on success
* @see AVInputFormat::read_seek
*/
int64_t avio_seek_time(AVIOContext *h, int stream_index,
int64_t timestamp, int flags);
#endif /* AVFORMAT_AVIO_H */
/*
* Version macros.
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVFORMAT_VERSION_H
#define AVFORMAT_VERSION_H
#include "libavutil/avutil.h"
#define LIBAVFORMAT_VERSION_MAJOR 53
#define LIBAVFORMAT_VERSION_MINOR 4
#define LIBAVFORMAT_VERSION_MICRO 0
#define LIBAVFORMAT_VERSION_INT AV_VERSION_INT(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \
LIBAVFORMAT_VERSION_MICRO)
#define LIBAVFORMAT_VERSION AV_VERSION(LIBAVFORMAT_VERSION_MAJOR, \
LIBAVFORMAT_VERSION_MINOR, \
LIBAVFORMAT_VERSION_MICRO)
#define LIBAVFORMAT_BUILD LIBAVFORMAT_VERSION_INT
#define LIBAVFORMAT_IDENT "Lavf" AV_STRINGIFY(LIBAVFORMAT_VERSION)
/**
* Those FF_API_* defines are not part of public API.
* They may change, break or disappear at any time.
*/
#ifndef FF_API_OLD_METADATA2
#define FF_API_OLD_METADATA2 (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_READ_SEEK
#define FF_API_READ_SEEK (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_OLD_AVIO
#define FF_API_OLD_AVIO (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_DUMP_FORMAT
#define FF_API_DUMP_FORMAT (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_PARSE_DATE
#define FF_API_PARSE_DATE (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_FIND_INFO_TAG
#define FF_API_FIND_INFO_TAG (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_PKT_DUMP
#define FF_API_PKT_DUMP (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_GUESS_IMG2_CODEC
#define FF_API_GUESS_IMG2_CODEC (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_SDP_CREATE
#define FF_API_SDP_CREATE (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_ALLOC_OUTPUT_CONTEXT
#define FF_API_ALLOC_OUTPUT_CONTEXT (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_FORMAT_PARAMETERS
#define FF_API_FORMAT_PARAMETERS (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#ifndef FF_API_FLAG_RTP_HINT
#define FF_API_FLAG_RTP_HINT (LIBAVFORMAT_VERSION_MAJOR < 54)
#endif
#endif /* AVFORMAT_VERSION_H */
/*
* copyright (c) 2006 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_ADLER32_H
#define AVUTIL_ADLER32_H
#include <stdint.h>
#include "attributes.h"
/**
* Calculate the Adler32 checksum of a buffer.
*
* Passing the return value to a subsequent av_adler32_update() call
* allows the checksum of multiple buffers to be calculated as though
* they were concatenated.
*
* @param adler initial checksum value
* @param buf pointer to input buffer
* @param len size of input buffer
* @return updated checksum
*/
unsigned long av_adler32_update(unsigned long adler, const uint8_t *buf,
unsigned int len) av_pure;
#endif /* AVUTIL_ADLER32_H */
/*
* copyright (c) 2007 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AES_H
#define AVUTIL_AES_H
#include <stdint.h>
extern const int av_aes_size;
struct AVAES;
/**
* Initialize an AVAES context.
* @param key_bits 128, 192 or 256
* @param decrypt 0 for encryption, 1 for decryption
*/
int av_aes_init(struct AVAES *a, const uint8_t *key, int key_bits, int decrypt);
/**
* Encrypt or decrypt a buffer using a previously initialized context.
* @param count number of 16 byte blocks
* @param dst destination array, can be equal to src
* @param src source array, can be equal to dst
* @param iv initialization vector for CBC mode, if NULL then ECB will be used
* @param decrypt 0 for encryption, 1 for decryption
*/
void av_aes_crypt(struct AVAES *a, uint8_t *dst, const uint8_t *src, int count, uint8_t *iv, int decrypt);
#endif /* AVUTIL_AES_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* Macro definitions for various function/variable attributes
*/
#ifndef AVUTIL_ATTRIBUTES_H
#define AVUTIL_ATTRIBUTES_H
#ifdef __GNUC__
# define AV_GCC_VERSION_AT_LEAST(x,y) (__GNUC__ > x || __GNUC__ == x && __GNUC_MINOR__ >= y)
#else
# define AV_GCC_VERSION_AT_LEAST(x,y) 0
#endif
#ifndef av_always_inline
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_always_inline __attribute__((always_inline)) inline
#else
# define av_always_inline inline
#endif
#endif
#ifndef av_noinline
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_noinline __attribute__((noinline))
#else
# define av_noinline
#endif
#endif
#ifndef av_pure
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_pure __attribute__((pure))
#else
# define av_pure
#endif
#endif
#ifndef av_const
#if AV_GCC_VERSION_AT_LEAST(2,6)
# define av_const __attribute__((const))
#else
# define av_const
#endif
#endif
#ifndef av_cold
#if AV_GCC_VERSION_AT_LEAST(4,3)
# define av_cold __attribute__((cold))
#else
# define av_cold
#endif
#endif
#ifndef av_flatten
#if AV_GCC_VERSION_AT_LEAST(4,1)
# define av_flatten __attribute__((flatten))
#else
# define av_flatten
#endif
#endif
#ifndef attribute_deprecated
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define attribute_deprecated __attribute__((deprecated))
#else
# define attribute_deprecated
#endif
#endif
#ifndef av_unused
#if defined(__GNUC__)
# define av_unused __attribute__((unused))
#else
# define av_unused
#endif
#endif
/**
* Mark a variable as used and prevent the compiler from optimizing it
* away. This is useful for variables accessed only from inline
* assembler without the compiler being aware.
*/
#ifndef av_used
#if AV_GCC_VERSION_AT_LEAST(3,1)
# define av_used __attribute__((used))
#else
# define av_used
#endif
#endif
#ifndef av_alias
#if AV_GCC_VERSION_AT_LEAST(3,3)
# define av_alias __attribute__((may_alias))
#else
# define av_alias
#endif
#endif
#ifndef av_uninit
#if defined(__GNUC__) && !defined(__INTEL_COMPILER)
# define av_uninit(x) x=x
#else
# define av_uninit(x) x
#endif
#endif
#ifdef __GNUC__
# define av_builtin_constant_p __builtin_constant_p
#else
# define av_builtin_constant_p(x) 0
#endif
#endif /* AVUTIL_ATTRIBUTES_H */
/*
* Copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
* Copyright (c) 2008 Peter Ross
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AUDIOCONVERT_H
#define AVUTIL_AUDIOCONVERT_H
#include <stdint.h>
/**
* @file
* audio conversion routines
*/
/* Audio channel masks */
#define AV_CH_FRONT_LEFT 0x00000001
#define AV_CH_FRONT_RIGHT 0x00000002
#define AV_CH_FRONT_CENTER 0x00000004
#define AV_CH_LOW_FREQUENCY 0x00000008
#define AV_CH_BACK_LEFT 0x00000010
#define AV_CH_BACK_RIGHT 0x00000020
#define AV_CH_FRONT_LEFT_OF_CENTER 0x00000040
#define AV_CH_FRONT_RIGHT_OF_CENTER 0x00000080
#define AV_CH_BACK_CENTER 0x00000100
#define AV_CH_SIDE_LEFT 0x00000200
#define AV_CH_SIDE_RIGHT 0x00000400
#define AV_CH_TOP_CENTER 0x00000800
#define AV_CH_TOP_FRONT_LEFT 0x00001000
#define AV_CH_TOP_FRONT_CENTER 0x00002000
#define AV_CH_TOP_FRONT_RIGHT 0x00004000
#define AV_CH_TOP_BACK_LEFT 0x00008000
#define AV_CH_TOP_BACK_CENTER 0x00010000
#define AV_CH_TOP_BACK_RIGHT 0x00020000
#define AV_CH_STEREO_LEFT 0x20000000 ///< Stereo downmix.
#define AV_CH_STEREO_RIGHT 0x40000000 ///< See AV_CH_STEREO_LEFT.
/** Channel mask value used for AVCodecContext.request_channel_layout
to indicate that the user requests the channel order of the decoder output
to be the native codec channel order. */
#define AV_CH_LAYOUT_NATIVE 0x8000000000000000LL
/* Audio channel convenience macros */
#define AV_CH_LAYOUT_MONO (AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_STEREO (AV_CH_FRONT_LEFT|AV_CH_FRONT_RIGHT)
#define AV_CH_LAYOUT_2_1 (AV_CH_LAYOUT_STEREO|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_SURROUND (AV_CH_LAYOUT_STEREO|AV_CH_FRONT_CENTER)
#define AV_CH_LAYOUT_4POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_CENTER)
#define AV_CH_LAYOUT_2_2 (AV_CH_LAYOUT_STEREO|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_QUAD (AV_CH_LAYOUT_STEREO|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT0 (AV_CH_LAYOUT_SURROUND|AV_CH_SIDE_LEFT|AV_CH_SIDE_RIGHT)
#define AV_CH_LAYOUT_5POINT1 (AV_CH_LAYOUT_5POINT0|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_5POINT0_BACK (AV_CH_LAYOUT_SURROUND|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_5POINT1_BACK (AV_CH_LAYOUT_5POINT0_BACK|AV_CH_LOW_FREQUENCY)
#define AV_CH_LAYOUT_7POINT0 (AV_CH_LAYOUT_5POINT0|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT1 (AV_CH_LAYOUT_5POINT1|AV_CH_BACK_LEFT|AV_CH_BACK_RIGHT)
#define AV_CH_LAYOUT_7POINT1_WIDE (AV_CH_LAYOUT_5POINT1_BACK|AV_CH_FRONT_LEFT_OF_CENTER|AV_CH_FRONT_RIGHT_OF_CENTER)
#define AV_CH_LAYOUT_STEREO_DOWNMIX (AV_CH_STEREO_LEFT|AV_CH_STEREO_RIGHT)
/**
* Return a channel layout id that matches name, 0 if no match.
*/
int64_t av_get_channel_layout(const char *name);
/**
* Return a description of a channel layout.
* If nb_channels is <= 0, it is guessed from the channel_layout.
*
* @param buf put here the string containing the channel layout
* @param buf_size size in bytes of the buffer
*/
void av_get_channel_layout_string(char *buf, int buf_size, int nb_channels, int64_t channel_layout);
/**
* Return the number of channels in the channel layout.
*/
int av_get_channel_layout_nb_channels(int64_t channel_layout);
#endif /* AVUTIL_AUDIOCONVERT_H */
/*
* copyright (c) 2010 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* simple assert() macros that are a bit more flexible than ISO C assert().
* @author Michael Niedermayer <michaelni@gmx.at>
*/
#ifndef AVUTIL_AVASSERT_H
#define AVUTIL_AVASSERT_H
#include <stdlib.h>
#include "avutil.h"
#include "log.h"
/**
* assert() equivalent, that is always enabled.
*/
#define av_assert0(cond) do { \
if (!(cond)) { \
av_log(NULL, AV_LOG_FATAL, "Assertion %s failed at %s:%d\n", \
AV_STRINGIFY(cond), __FILE__, __LINE__); \
abort(); \
} \
} while (0)
/**
* assert() equivalent, that does not lie in speed critical code.
* These asserts() thus can be enabled without fearing speedloss.
*/
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 0
#define av_assert1(cond) av_assert0(cond)
#else
#define av_assert1(cond) ((void)0)
#endif
/**
* assert() equivalent, that does lie in speed critical code.
*/
#if defined(ASSERT_LEVEL) && ASSERT_LEVEL > 1
#define av_assert2(cond) av_assert0(cond)
#else
#define av_assert2(cond) ((void)0)
#endif
#endif /* AVUTIL_AVASSERT_H */
/* Generated by ffconf */
#ifndef AVUTIL_AVCONFIG_H
#define AVUTIL_AVCONFIG_H
#define AV_HAVE_BIGENDIAN 0
#define AV_HAVE_FAST_UNALIGNED 1
#endif /* AVUTIL_AVCONFIG_H */
/*
* Copyright (c) 2007 Mans Rullgard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AVSTRING_H
#define AVUTIL_AVSTRING_H
#include <stddef.h>
/**
* Return non-zero if pfx is a prefix of str. If it is, *ptr is set to
* the address of the first character in str after the prefix.
*
* @param str input string
* @param pfx prefix to test
* @param ptr updated if the prefix is matched inside str
* @return non-zero if the prefix matches, zero otherwise
*/
int av_strstart(const char *str, const char *pfx, const char **ptr);
/**
* Return non-zero if pfx is a prefix of str independent of case. If
* it is, *ptr is set to the address of the first character in str
* after the prefix.
*
* @param str input string
* @param pfx prefix to test
* @param ptr updated if the prefix is matched inside str
* @return non-zero if the prefix matches, zero otherwise
*/
int av_stristart(const char *str, const char *pfx, const char **ptr);
/**
* Locate the first case-independent occurrence in the string haystack
* of the string needle. A zero-length string needle is considered to
* match at the start of haystack.
*
* This function is a case-insensitive version of the standard strstr().
*
* @param haystack string to search in
* @param needle string to search for
* @return pointer to the located match within haystack
* or a null pointer if no match
*/
char *av_stristr(const char *haystack, const char *needle);
/**
* Copy the string src to dst, but no more than size - 1 bytes, and
* null-terminate dst.
*
* This function is the same as BSD strlcpy().
*
* @param dst destination buffer
* @param src source string
* @param size size of destination buffer
* @return the length of src
*
* WARNING: since the return value is the length of src, src absolutely
* _must_ be a properly 0-terminated string, otherwise this will read beyond
* the end of the buffer and possibly crash.
*/
size_t av_strlcpy(char *dst, const char *src, size_t size);
/**
* Append the string src to the string dst, but to a total length of
* no more than size - 1 bytes, and null-terminate dst.
*
* This function is similar to BSD strlcat(), but differs when
* size <= strlen(dst).
*
* @param dst destination buffer
* @param src source string
* @param size size of destination buffer
* @return the total length of src and dst
*
* WARNING: since the return value uses the lengths of src and dst, these absolutely
* _must_ be properly 0-terminated strings, otherwise this will read beyond
* the end of the buffer and possibly crash.
*/
size_t av_strlcat(char *dst, const char *src, size_t size);
/**
* Append output to a string, according to a format. Never write out of
* the destination buffer, and always put a terminating 0 within
* the buffer.
* @param dst destination buffer (string to which the output is
* appended)
* @param size total size of the destination buffer
* @param fmt printf-compatible format string, specifying how the
* following parameters are used
* @return the length of the string that would have been generated
* if enough space had been available
*/
size_t av_strlcatf(char *dst, size_t size, const char *fmt, ...);
/**
* Convert a number to a av_malloced string.
*/
char *av_d2str(double d);
/**
* Unescape the given string until a non escaped terminating char,
* and return the token corresponding to the unescaped string.
*
* The normal \ and ' escaping is supported. Leading and trailing
* whitespaces are removed, unless they are escaped with '\' or are
* enclosed between ''.
*
* @param buf the buffer to parse, buf will be updated to point to the
* terminating char
* @param term a 0-terminated list of terminating chars
* @return the malloced unescaped string, which must be av_freed by
* the user, NULL in case of allocation failure
*/
char *av_get_token(const char **buf, const char *term);
#endif /* AVUTIL_AVSTRING_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_AVUTIL_H
#define AVUTIL_AVUTIL_H
/**
* @file
* external API header
*/
#define AV_STRINGIFY(s) AV_TOSTRING(s)
#define AV_TOSTRING(s) #s
#define AV_GLUE(a, b) a ## b
#define AV_JOIN(a, b) AV_GLUE(a, b)
#define AV_PRAGMA(s) _Pragma(#s)
#define AV_VERSION_INT(a, b, c) (a<<16 | b<<8 | c)
#define AV_VERSION_DOT(a, b, c) a ##.## b ##.## c
#define AV_VERSION(a, b, c) AV_VERSION_DOT(a, b, c)
#define LIBAVUTIL_VERSION_MAJOR 51
#define LIBAVUTIL_VERSION_MINOR 9
#define LIBAVUTIL_VERSION_MICRO 1
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \
LIBAVUTIL_VERSION_MINOR, \
LIBAVUTIL_VERSION_MICRO)
#define LIBAVUTIL_VERSION AV_VERSION(LIBAVUTIL_VERSION_MAJOR, \
LIBAVUTIL_VERSION_MINOR, \
LIBAVUTIL_VERSION_MICRO)
#define LIBAVUTIL_BUILD LIBAVUTIL_VERSION_INT
#define LIBAVUTIL_IDENT "Lavu" AV_STRINGIFY(LIBAVUTIL_VERSION)
/**
* Those FF_API_* defines are not part of public API.
* They may change, break or disappear at any time.
*/
#ifndef FF_API_OLD_EVAL_NAMES
#define FF_API_OLD_EVAL_NAMES (LIBAVUTIL_VERSION_MAJOR < 52)
#endif
#ifndef FF_API_GET_BITS_PER_SAMPLE_FMT
#define FF_API_GET_BITS_PER_SAMPLE_FMT (LIBAVUTIL_VERSION_MAJOR < 52)
#endif
#ifndef FF_API_FIND_OPT
#define FF_API_FIND_OPT (LIBAVUTIL_VERSION_MAJOR < 52)
#endif
/**
* Return the LIBAVUTIL_VERSION_INT constant.
*/
unsigned avutil_version(void);
/**
* Return the libavutil build-time configuration.
*/
const char *avutil_configuration(void);
/**
* Return the libavutil license.
*/
const char *avutil_license(void);
enum AVMediaType {
AVMEDIA_TYPE_UNKNOWN = -1,
AVMEDIA_TYPE_VIDEO,
AVMEDIA_TYPE_AUDIO,
AVMEDIA_TYPE_DATA,
AVMEDIA_TYPE_SUBTITLE,
AVMEDIA_TYPE_ATTACHMENT,
AVMEDIA_TYPE_NB
};
#define FF_LAMBDA_SHIFT 7
#define FF_LAMBDA_SCALE (1<<FF_LAMBDA_SHIFT)
#define FF_QP2LAMBDA 118 ///< factor to convert from H.263 QP to lambda
#define FF_LAMBDA_MAX (256*128-1)
#define FF_QUALITY_SCALE FF_LAMBDA_SCALE //FIXME maybe remove
#define AV_NOPTS_VALUE INT64_C(0x8000000000000000)
#define AV_TIME_BASE 1000000
#define AV_TIME_BASE_Q (AVRational){1, AV_TIME_BASE}
enum AVPictureType {
AV_PICTURE_TYPE_I = 1, ///< Intra
AV_PICTURE_TYPE_P, ///< Predicted
AV_PICTURE_TYPE_B, ///< Bi-dir predicted
AV_PICTURE_TYPE_S, ///< S(GMC)-VOP MPEG4
AV_PICTURE_TYPE_SI, ///< Switching Intra
AV_PICTURE_TYPE_SP, ///< Switching Predicted
AV_PICTURE_TYPE_BI, ///< BI type
};
/**
* Return a single letter to describe the given picture type
* pict_type.
*
* @param[in] pict_type the picture type @return a single character
* representing the picture type, '?' if pict_type is unknown
*/
char av_get_picture_type_char(enum AVPictureType pict_type);
#include "common.h"
#include "error.h"
#include "mathematics.h"
#include "rational.h"
#include "intfloat_readwrite.h"
#include "log.h"
#include "pixfmt.h"
#endif /* AVUTIL_AVUTIL_H */
/*
* Copyright (c) 2006 Ryan Martell. (rdm4@martellventures.com)
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_BASE64_H
#define AVUTIL_BASE64_H
#include <stdint.h>
/**
* Decode a base64-encoded string.
*
* @param out buffer for decoded data
* @param in null-terminated input string
* @param out_size size in bytes of the out buffer, must be at
* least 3/4 of the length of in
* @return number of bytes written, or a negative value in case of
* invalid input
*/
int av_base64_decode(uint8_t *out, const char *in, int out_size);
/**
* Encode data to base64 and null-terminate.
*
* @param out buffer for encoded data
* @param out_size size in bytes of the output buffer, must be at
* least AV_BASE64_SIZE(in_size)
* @param in_size size in bytes of the 'in' buffer
* @return 'out' or NULL in case of error
*/
char *av_base64_encode(char *out, int out_size, const uint8_t *in, int in_size);
/**
* Calculate the output size needed to base64-encode x bytes.
*/
#define AV_BASE64_SIZE(x) (((x)+2) / 3 * 4 + 1)
#endif /* AVUTIL_BASE64_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* byte swapping routines
*/
#ifndef AVUTIL_BSWAP_H
#define AVUTIL_BSWAP_H
#include <stdint.h>
#include "libavutil/avconfig.h"
#include "attributes.h"
#ifdef HAVE_AV_CONFIG_H
#include "config.h"
#if ARCH_ARM
# include "arm/bswap.h"
#elif ARCH_AVR32
# include "avr32/bswap.h"
#elif ARCH_BFIN
# include "bfin/bswap.h"
#elif ARCH_SH4
# include "sh4/bswap.h"
#elif ARCH_X86
# include "x86/bswap.h"
#endif
#endif /* HAVE_AV_CONFIG_H */
#define AV_BSWAP16C(x) (((x) << 8 & 0xff00) | ((x) >> 8 & 0x00ff))
#define AV_BSWAP32C(x) (AV_BSWAP16C(x) << 16 | AV_BSWAP16C((x) >> 16))
#define AV_BSWAP64C(x) (AV_BSWAP32C(x) << 32 | AV_BSWAP32C((x) >> 32))
#define AV_BSWAPC(s, x) AV_BSWAP##s##C(x)
#ifndef av_bswap16
/**
 * Swap the byte order of a 16-bit value.
 * @param x value to byte-swap
 * @return x with its two bytes exchanged
 */
static av_always_inline av_const uint16_t av_bswap16(uint16_t x)
{
    return (x << 8) | (x >> 8);
}
#endif
#ifndef av_bswap32
/**
 * Swap the byte order of a 32-bit value.
 * @param x value to byte-swap
 * @return x with its four bytes in reverse order
 */
static av_always_inline av_const uint32_t av_bswap32(uint32_t x)
{
    return ((x << 24) & 0xFF000000) |
           ((x <<  8) & 0x00FF0000) |
           ((x >>  8) & 0x0000FF00) |
           ((x >> 24) & 0x000000FF);
}
#endif
#ifndef av_bswap64
/**
 * Swap the byte order of a 64-bit value.
 *
 * Byte-swap each 32-bit half with av_bswap32() and exchange the halves;
 * this is equivalent to reversing all eight bytes.
 * @param x value to byte-swap
 * @return x with its eight bytes in reverse order
 */
static inline uint64_t av_const av_bswap64(uint64_t x)
{
    uint32_t lo = (uint32_t) x;
    uint32_t hi = (uint32_t)(x >> 32);
    return ((uint64_t)av_bswap32(lo) << 32) | av_bswap32(hi);
}
#endif
// be2ne ... big-endian to native-endian
// le2ne ... little-endian to native-endian
#if AV_HAVE_BIGENDIAN
#define av_be2ne16(x) (x)
#define av_be2ne32(x) (x)
#define av_be2ne64(x) (x)
#define av_le2ne16(x) av_bswap16(x)
#define av_le2ne32(x) av_bswap32(x)
#define av_le2ne64(x) av_bswap64(x)
#define AV_BE2NEC(s, x) (x)
#define AV_LE2NEC(s, x) AV_BSWAPC(s, x)
#else
#define av_be2ne16(x) av_bswap16(x)
#define av_be2ne32(x) av_bswap32(x)
#define av_be2ne64(x) av_bswap64(x)
#define av_le2ne16(x) (x)
#define av_le2ne32(x) (x)
#define av_le2ne64(x) (x)
#define AV_BE2NEC(s, x) AV_BSWAPC(s, x)
#define AV_LE2NEC(s, x) (x)
#endif
#define AV_BE2NE16C(x) AV_BE2NEC(16, x)
#define AV_BE2NE32C(x) AV_BE2NEC(32, x)
#define AV_BE2NE64C(x) AV_BE2NEC(64, x)
#define AV_LE2NE16C(x) AV_LE2NEC(16, x)
#define AV_LE2NE32C(x) AV_LE2NEC(32, x)
#define AV_LE2NE64C(x) AV_LE2NEC(64, x)
#endif /* AVUTIL_BSWAP_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* common internal and external API header
*/
#ifndef AVUTIL_COMMON_H
#define AVUTIL_COMMON_H
#include <ctype.h>
#include <errno.h>
#include <inttypes.h>
#include <limits.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "attributes.h"
#include "libavutil/avconfig.h"
#if AV_HAVE_BIGENDIAN
# define AV_NE(be, le) (be)
#else
# define AV_NE(be, le) (le)
#endif
//rounded division & shift
#define RSHIFT(a,b) ((a) > 0 ? ((a) + ((1<<(b))>>1))>>(b) : ((a) + ((1<<(b))>>1)-1)>>(b))
/* assume b>0 */
#define ROUNDED_DIV(a,b) (((a)>0 ? (a) + ((b)>>1) : (a) - ((b)>>1))/(b))
#define FFUDIV(a,b) (((a)>0 ?(a):(a)-(b)+1) / (b))
#define FFUMOD(a,b) ((a)-(b)*FFUDIV(a,b))
#define FFABS(a) ((a) >= 0 ? (a) : (-(a)))
#define FFSIGN(a) ((a) > 0 ? 1 : -1)
#define FFMAX(a,b) ((a) > (b) ? (a) : (b))
#define FFMAX3(a,b,c) FFMAX(FFMAX(a,b),c)
#define FFMIN(a,b) ((a) > (b) ? (b) : (a))
#define FFMIN3(a,b,c) FFMIN(FFMIN(a,b),c)
#define FFSWAP(type,a,b) do{type SWAP_tmp= b; b= a; a= SWAP_tmp;}while(0)
#define FF_ARRAY_ELEMS(a) (sizeof(a) / sizeof((a)[0]))
#define FFALIGN(x, a) (((x)+(a)-1)&~((a)-1))
/* misc math functions */
extern const uint8_t ff_log2_tab[256];
extern const uint8_t av_reverse[256];
/**
 * Compute the integer base-2 logarithm of v, i.e. the position of the
 * highest set bit, via the 256-entry lookup table ff_log2_tab.
 * NOTE: for v == 0 this returns 0 (same as for v == 1), matching the
 * table's first entry.
 */
static av_always_inline av_const int av_log2_c(unsigned int v)
{
    int n = 0;
    /* Reduce v to a single byte, counting 8 for every byte shifted out. */
    while (v & ~0xFFu) {
        v >>= 8;
        n += 8;
    }
    return n + ff_log2_tab[v];
}
/**
 * Compute the integer base-2 logarithm of a 16-bit value v using the
 * ff_log2_tab lookup table. Behavior for values outside 0..0xFFFF is
 * undefined (out-of-bounds table access), as in the 32-bit variant's
 * contract.
 */
static av_always_inline av_const int av_log2_16bit_c(unsigned int v)
{
    /* Pick the high or low byte of v and add 8 when the high byte is used. */
    const int n = (v & 0xff00) ? 8 : 0;
    return n + ff_log2_tab[v >> n];
}
#ifdef HAVE_AV_CONFIG_H
# include "config.h"
# include "intmath.h"
#endif
/* Pull in unguarded fallback defines at the end of this file. */
#include "common.h"
/**
 * Clip a signed integer value into the amin-amax range.
 * @param a value to clip
 * @param amin minimum value of the clip range
 * @param amax maximum value of the clip range
 * @return a limited to [amin, amax]
 */
static av_always_inline av_const int av_clip_c(int a, int amin, int amax)
{
    return a < amin ? amin
         : a > amax ? amax
         :            a;
}
/**
 * Clip a signed integer value into the 0-255 range.
 * @param a value to clip
 * @return a limited to [0, 255]
 */
static av_always_inline av_const uint8_t av_clip_uint8_c(int a)
{
    /* Any bit outside the low 8 means a is out of range. The arithmetic
     * shift of -a is then all-ones for a > 255 (truncates to 255) and
     * zero for a < 0. */
    return (a & ~0xFF) ? (-a) >> 31 : a;
}
/**
 * Clip a signed integer value into the -128,127 range.
 * @param a value to clip
 * @return a limited to [-128, 127]
 */
static av_always_inline av_const int8_t av_clip_int8_c(int a)
{
    /* Biasing by 0x80 maps the valid range onto 0..0xFF; any higher bit
     * set means overflow. (a>>31)^0x7F selects -128 for negative a and
     * 127 otherwise. */
    return ((a + 0x80) & ~0xFF) ? (a >> 31) ^ 0x7F : a;
}
/**
 * Clip a signed integer value into the 0-65535 range.
 * @param a value to clip
 * @return a limited to [0, 65535]
 */
static av_always_inline av_const uint16_t av_clip_uint16_c(int a)
{
    /* Same branchless saturation idiom as av_clip_uint8_c, widened to
     * 16 bits. */
    return (a & ~0xFFFF) ? (-a) >> 31 : a;
}
/**
 * Clip a signed integer value into the -32768,32767 range.
 * @param a value to clip
 * @return a limited to [-32768, 32767]
 */
static av_always_inline av_const int16_t av_clip_int16_c(int a)
{
    /* Same branchless saturation idiom as av_clip_int8_c, widened to
     * 16 bits. */
    return ((a + 0x8000) & ~0xFFFF) ? (a >> 31) ^ 0x7FFF : a;
}
/**
 * Clip a signed 64-bit integer value into the -2147483648,2147483647 range.
 * @param a value to clip
 * @return a limited to [INT32_MIN, INT32_MAX]
 */
static av_always_inline av_const int32_t av_clipl_int32_c(int64_t a)
{
    /* Bias by 2^31 so the valid range occupies the low 32 bits; any
     * higher bit set means a is out of range, and (a>>63)^0x7FFFFFFF
     * saturates towards the correct extreme. */
    return ((a + 0x80000000u) & ~UINT64_C(0xFFFFFFFF)) ? (a >> 63) ^ 0x7FFFFFFF
                                                       : (int32_t)a;
}
/**
 * Clip a signed integer to an unsigned power of two range.
 * @param a value to clip
 * @param p bit position to clip at
 * @return a limited to [0, 2^p - 1]
 */
static av_always_inline av_const unsigned av_clip_uintp2_c(int a, int p)
{
    const int mask = (1 << p) - 1;
    if (a & ~mask)
        return (-a >> 31) & mask;  /* all-ones (max) for a > mask, 0 for a < 0 */
    return a;
}
/**
 * Clip a float value into the amin-amax range.
 * @param a value to clip
 * @param amin minimum value of the clip range
 * @param amax maximum value of the clip range
 * @return a limited to [amin, amax]
 */
static av_always_inline av_const float av_clipf_c(float a, float amin, float amax)
{
    return a < amin ? amin
         : a > amax ? amax
         :            a;
}
/** Compute ceil(log2(x)).
 * (x - 1) << 1 makes the floor-style av_log2 round up: exact powers of
 * two map onto themselves, all other values are bumped to the next
 * exponent.
 * @param x value used to compute ceil(log2(x))
 * @return computed ceiling of log2(x)
 */
static av_always_inline av_const int av_ceil_log2_c(int x)
{
    return av_log2((x - 1) << 1);
}
/**
 * Count number of bits set to one in x (SWAR/parallel-sum popcount).
 * @param x value to count bits of
 * @return the number of bits set to one in x
 */
static av_always_inline av_const int av_popcount_c(uint32_t x)
{
    const uint32_t m1 = 0x55555555; /* pairs   */
    const uint32_t m2 = 0x33333333; /* nibbles */
    const uint32_t m4 = 0x0F0F0F0F; /* bytes   */
    x = x - ((x >> 1) & m1);            /* 2-bit sums  */
    x = (x & m2) + ((x >> 2) & m2);     /* 4-bit sums  */
    x = (x + (x >> 4)) & m4;            /* 8-bit sums  */
    x += x >> 8;                        /* fold bytes  */
    x += x >> 16;
    return x & 0x3F;                    /* result fits in 6 bits (max 32) */
}
#define MKTAG(a,b,c,d) ((a) | ((b) << 8) | ((c) << 16) | ((d) << 24))
#define MKBETAG(a,b,c,d) ((d) | ((c) << 8) | ((b) << 16) | ((a) << 24))
/**
* Convert a UTF-8 character (up to 4 bytes) to its 32-bit UCS-4 encoded form.
*
* @param val Output value, must be an lvalue of type uint32_t.
* @param GET_BYTE Expression reading one byte from the input.
* Evaluated up to 7 times (4 for the currently
* assigned Unicode range). With a memory buffer
* input, this could be *ptr++.
* @param ERROR Expression to be evaluated on invalid input,
* typically a goto statement.
*/
#define GET_UTF8(val, GET_BYTE, ERROR)\
val= GET_BYTE;\
{\
int ones= 7 - av_log2(val ^ 255);\
if(ones==1)\
ERROR\
val&= 127>>ones;\
while(--ones > 0){\
int tmp= GET_BYTE - 128;\
if(tmp>>6)\
ERROR\
val= (val<<6) + tmp;\
}\
}
/**
* Convert a UTF-16 character (2 or 4 bytes) to its 32-bit UCS-4 encoded form.
*
* @param val Output value, must be an lvalue of type uint32_t.
* @param GET_16BIT Expression returning two bytes of UTF-16 data converted
* to native byte order. Evaluated one or two times.
* @param ERROR Expression to be evaluated on invalid input,
* typically a goto statement.
*/
#define GET_UTF16(val, GET_16BIT, ERROR)\
val = GET_16BIT;\
{\
unsigned int hi = val - 0xD800;\
if (hi < 0x800) {\
val = GET_16BIT - 0xDC00;\
if (val > 0x3FFU || hi > 0x3FFU)\
ERROR\
val += (hi<<10) + 0x10000;\
}\
}\
/*!
* \def PUT_UTF8(val, tmp, PUT_BYTE)
* Convert a 32-bit Unicode character to its UTF-8 encoded form (up to 4 bytes long).
* \param val is an input-only argument and should be of type uint32_t. It holds
* a UCS-4 encoded Unicode character that is to be converted to UTF-8. If
* val is given as a function it is executed only once.
* \param tmp is a temporary variable and should be of type uint8_t. It
* represents an intermediate value during conversion that is to be
* output by PUT_BYTE.
* \param PUT_BYTE writes the converted UTF-8 bytes to any proper destination.
* It could be a function or a statement, and uses tmp as the input byte.
* For example, PUT_BYTE could be "*output++ = tmp;" PUT_BYTE will be
* executed up to 4 times for values in the valid UTF-8 range and up to
* 7 times in the general case, depending on the length of the converted
* Unicode character.
*/
#define PUT_UTF8(val, tmp, PUT_BYTE)\
{\
int bytes, shift;\
uint32_t in = val;\
if (in < 0x80) {\
tmp = in;\
PUT_BYTE\
} else {\
bytes = (av_log2(in) + 4) / 5;\
shift = (bytes - 1) * 6;\
tmp = (256 - (256 >> bytes)) | (in >> shift);\
PUT_BYTE\
while (shift >= 6) {\
shift -= 6;\
tmp = 0x80 | ((in >> shift) & 0x3f);\
PUT_BYTE\
}\
}\
}
/*!
* \def PUT_UTF16(val, tmp, PUT_16BIT)
* Convert a 32-bit Unicode character to its UTF-16 encoded form (2 or 4 bytes).
* \param val is an input-only argument and should be of type uint32_t. It holds
* a UCS-4 encoded Unicode character that is to be converted to UTF-16. If
* val is given as a function it is executed only once.
* \param tmp is a temporary variable and should be of type uint16_t. It
* represents an intermediate value during conversion that is to be
* output by PUT_16BIT.
* \param PUT_16BIT writes the converted UTF-16 data to any proper destination
* in desired endianness. It could be a function or a statement, and uses tmp
 * as the input data. For example, PUT_16BIT could be "*output++ = tmp;"
 * PUT_16BIT will be executed 1 or 2 times depending on input character.
*/
#define PUT_UTF16(val, tmp, PUT_16BIT)\
{\
uint32_t in = val;\
if (in < 0x10000) {\
tmp = in;\
PUT_16BIT\
} else {\
tmp = 0xD800 | ((in - 0x10000) >> 10);\
PUT_16BIT\
tmp = 0xDC00 | ((in - 0x10000) & 0x3FF);\
PUT_16BIT\
}\
}\
#include "mem.h"
#ifdef HAVE_AV_CONFIG_H
# include "internal.h"
#endif /* HAVE_AV_CONFIG_H */
#endif /* AVUTIL_COMMON_H */
/*
* The following definitions are outside the multiple inclusion guard
* to ensure they are immediately available in intmath.h.
*/
#ifndef av_log2
# define av_log2 av_log2_c
#endif
#ifndef av_log2_16bit
# define av_log2_16bit av_log2_16bit_c
#endif
#ifndef av_ceil_log2
# define av_ceil_log2 av_ceil_log2_c
#endif
#ifndef av_clip
# define av_clip av_clip_c
#endif
#ifndef av_clip_uint8
# define av_clip_uint8 av_clip_uint8_c
#endif
#ifndef av_clip_int8
# define av_clip_int8 av_clip_int8_c
#endif
#ifndef av_clip_uint16
# define av_clip_uint16 av_clip_uint16_c
#endif
#ifndef av_clip_int16
# define av_clip_int16 av_clip_int16_c
#endif
#ifndef av_clipl_int32
# define av_clipl_int32 av_clipl_int32_c
#endif
#ifndef av_clip_uintp2
# define av_clip_uintp2 av_clip_uintp2_c
#endif
#ifndef av_clipf
# define av_clipf av_clipf_c
#endif
#ifndef av_popcount
# define av_popcount av_popcount_c
#endif
/*
* Copyright (c) 2000, 2001, 2002 Fabrice Bellard
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CPU_H
#define AVUTIL_CPU_H
#define AV_CPU_FLAG_FORCE 0x80000000 /* force usage of selected flags (OR) */
/* lower 16 bits - CPU features */
#define AV_CPU_FLAG_MMX 0x0001 ///< standard MMX
#define AV_CPU_FLAG_MMX2 0x0002 ///< SSE integer functions or AMD MMX ext
#define AV_CPU_FLAG_3DNOW 0x0004 ///< AMD 3DNOW
#define AV_CPU_FLAG_SSE 0x0008 ///< SSE functions
#define AV_CPU_FLAG_SSE2 0x0010 ///< PIV SSE2 functions
#define AV_CPU_FLAG_SSE2SLOW 0x40000000 ///< SSE2 supported, but usually not faster
#define AV_CPU_FLAG_3DNOWEXT 0x0020 ///< AMD 3DNowExt
#define AV_CPU_FLAG_SSE3 0x0040 ///< Prescott SSE3 functions
#define AV_CPU_FLAG_SSE3SLOW 0x20000000 ///< SSE3 supported, but usually not faster
#define AV_CPU_FLAG_SSSE3 0x0080 ///< Conroe SSSE3 functions
#define AV_CPU_FLAG_ATOM 0x10000000 ///< Atom processor, some SSSE3 instructions are slower
#define AV_CPU_FLAG_SSE4 0x0100 ///< Penryn SSE4.1 functions
#define AV_CPU_FLAG_SSE42 0x0200 ///< Nehalem SSE4.2 functions
#define AV_CPU_FLAG_AVX 0x4000 ///< AVX functions: requires OS support even if YMM registers aren't used
#define AV_CPU_FLAG_IWMMXT 0x0100 ///< XScale IWMMXT
#define AV_CPU_FLAG_ALTIVEC 0x0001 ///< standard
/**
* Return the flags which specify extensions supported by the CPU.
*/
int av_get_cpu_flags(void);
/**
 * Disable CPU detection and force the specified flags.
*/
void av_force_cpu_flags(int flags);
/* The following CPU-specific functions shall not be called directly. */
int ff_get_cpu_flags_arm(void);
int ff_get_cpu_flags_ppc(void);
int ff_get_cpu_flags_x86(void);
#endif /* AVUTIL_CPU_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_CRC_H
#define AVUTIL_CRC_H
#include <stdint.h>
#include <stddef.h>
#include "attributes.h"
typedef uint32_t AVCRC;
typedef enum {
AV_CRC_8_ATM,
AV_CRC_16_ANSI,
AV_CRC_16_CCITT,
AV_CRC_32_IEEE,
AV_CRC_32_IEEE_LE, /*< reversed bitorder version of AV_CRC_32_IEEE */
AV_CRC_MAX, /*< Not part of public API! Do not use outside libavutil. */
}AVCRCId;
int av_crc_init(AVCRC *ctx, int le, int bits, uint32_t poly, int ctx_size);
const AVCRC *av_crc_get_table(AVCRCId crc_id);
uint32_t av_crc(const AVCRC *ctx, uint32_t start_crc, const uint8_t *buffer, size_t length) av_pure;
#endif /* AVUTIL_CRC_H */
/*
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file Public dictionary API.
*/
#ifndef AVUTIL_DICT_H
#define AVUTIL_DICT_H
#define AV_DICT_MATCH_CASE 1
#define AV_DICT_IGNORE_SUFFIX 2
#define AV_DICT_DONT_STRDUP_KEY 4
#define AV_DICT_DONT_STRDUP_VAL 8
#define AV_DICT_DONT_OVERWRITE 16 ///< Don't overwrite existing entries.
#define AV_DICT_APPEND 32 /**< If the entry already exists, append to it. Note that no
delimiter is added, the strings are simply concatenated. */
typedef struct {
char *key;
char *value;
} AVDictionaryEntry;
typedef struct AVDictionary AVDictionary;
/**
* Get a dictionary entry with matching key.
*
* @param prev Set to the previous matching element to find the next.
* If set to NULL the first matching element is returned.
* @param flags Allows case as well as suffix-insensitive comparisons.
* @return Found entry or NULL, changing key or value leads to undefined behavior.
*/
AVDictionaryEntry *
av_dict_get(AVDictionary *m, const char *key, const AVDictionaryEntry *prev, int flags);
/**
* Set the given entry in *pm, overwriting an existing entry.
*
* @param pm pointer to a pointer to a dictionary struct. If *pm is NULL
* a dictionary struct is allocated and put in *pm.
* @param key entry key to add to *pm (will be av_strduped depending on flags)
* @param value entry value to add to *pm (will be av_strduped depending on flags).
* Passing a NULL value will cause an existing tag to be deleted.
* @return >= 0 on success otherwise an error code <0
*/
int av_dict_set(AVDictionary **pm, const char *key, const char *value, int flags);
/**
* Copy entries from one AVDictionary struct into another.
* @param dst pointer to a pointer to a AVDictionary struct. If *dst is NULL,
* this function will allocate a struct for you and put it in *dst
* @param src pointer to source AVDictionary struct
* @param flags flags to use when setting entries in *dst
* @note metadata is read using the AV_DICT_IGNORE_SUFFIX flag
*/
void av_dict_copy(AVDictionary **dst, AVDictionary *src, int flags);
/**
* Free all the memory allocated for an AVDictionary struct.
*/
void av_dict_free(AVDictionary **m);
#endif // AVUTIL_DICT_H
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* error code definitions
*/
#ifndef AVUTIL_ERROR_H
#define AVUTIL_ERROR_H
#include <errno.h>
#include "avutil.h"
/* error handling */
#if EDOM > 0
#define AVERROR(e) (-(e)) ///< Returns a negative error code from a POSIX error code, to return from library functions.
#define AVUNERROR(e) (-(e)) ///< Returns a POSIX error code from a library function error return value.
#else
/* Some platforms have E* and errno already negated. */
#define AVERROR(e) (e)
#define AVUNERROR(e) (e)
#endif
#define AVERROR_BSF_NOT_FOUND (-MKTAG(0xF8,'B','S','F')) ///< Bitstream filter not found
#define AVERROR_DECODER_NOT_FOUND (-MKTAG(0xF8,'D','E','C')) ///< Decoder not found
#define AVERROR_DEMUXER_NOT_FOUND (-MKTAG(0xF8,'D','E','M')) ///< Demuxer not found
#define AVERROR_ENCODER_NOT_FOUND (-MKTAG(0xF8,'E','N','C')) ///< Encoder not found
#define AVERROR_EOF (-MKTAG( 'E','O','F',' ')) ///< End of file
#define AVERROR_EXIT (-MKTAG( 'E','X','I','T')) ///< Immediate exit was requested; the called function should not be restarted
#define AVERROR_FILTER_NOT_FOUND (-MKTAG(0xF8,'F','I','L')) ///< Filter not found
#define AVERROR_INVALIDDATA (-MKTAG( 'I','N','D','A')) ///< Invalid data found when processing input
#define AVERROR_MUXER_NOT_FOUND (-MKTAG(0xF8,'M','U','X')) ///< Muxer not found
#define AVERROR_OPTION_NOT_FOUND (-MKTAG(0xF8,'O','P','T')) ///< Option not found
#define AVERROR_PATCHWELCOME (-MKTAG( 'P','A','W','E')) ///< Not yet implemented in FFmpeg, patches welcome
#define AVERROR_PROTOCOL_NOT_FOUND (-MKTAG(0xF8,'P','R','O')) ///< Protocol not found
#define AVERROR_STREAM_NOT_FOUND (-MKTAG(0xF8,'S','T','R')) ///< Stream not found
/**
* Put a description of the AVERROR code errnum in errbuf.
* In case of failure the global variable errno is set to indicate the
* error. Even in case of failure av_strerror() will print a generic
* error message indicating the errnum provided to errbuf.
*
* @param errnum error code to describe
* @param errbuf buffer to which description is written
* @param errbuf_size the size in bytes of errbuf
* @return 0 on success, a negative value if a description for errnum
* cannot be found
*/
int av_strerror(int errnum, char *errbuf, size_t errbuf_size);
#endif /* AVUTIL_ERROR_H */
/*
* Copyright (c) 2002 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* simple arithmetic expression evaluator
*/
#ifndef AVUTIL_EVAL_H
#define AVUTIL_EVAL_H
#include "avutil.h"
typedef struct AVExpr AVExpr;
/**
* Parse and evaluate an expression.
* Note, this is significantly slower than av_expr_eval().
*
* @param res a pointer to a double where is put the result value of
* the expression, or NAN in case of error
* @param s expression as a zero terminated string, for example "1+2^3+5*5+sin(2/3)"
* @param const_names NULL terminated array of zero terminated strings of constant identifiers, for example {"PI", "E", 0}
* @param const_values a zero terminated array of values for the identifiers from const_names
* @param func1_names NULL terminated array of zero terminated strings of funcs1 identifiers
* @param funcs1 NULL terminated array of function pointers for functions which take 1 argument
* @param func2_names NULL terminated array of zero terminated strings of funcs2 identifiers
* @param funcs2 NULL terminated array of function pointers for functions which take 2 arguments
* @param opaque a pointer which will be passed to all functions from funcs1 and funcs2
* @param log_ctx parent logging context
* @return 0 in case of success, a negative value corresponding to an
* AVERROR code otherwise
*/
int av_expr_parse_and_eval(double *res, const char *s,
const char * const *const_names, const double *const_values,
const char * const *func1_names, double (* const *funcs1)(void *, double),
const char * const *func2_names, double (* const *funcs2)(void *, double, double),
void *opaque, int log_offset, void *log_ctx);
/**
* Parse an expression.
*
* @param expr a pointer where is put an AVExpr containing the parsed
 * value in case of successful parsing, or NULL otherwise.
* The pointed to AVExpr must be freed with av_expr_free() by the user
* when it is not needed anymore.
* @param s expression as a zero terminated string, for example "1+2^3+5*5+sin(2/3)"
* @param const_names NULL terminated array of zero terminated strings of constant identifiers, for example {"PI", "E", 0}
* @param func1_names NULL terminated array of zero terminated strings of funcs1 identifiers
* @param funcs1 NULL terminated array of function pointers for functions which take 1 argument
* @param func2_names NULL terminated array of zero terminated strings of funcs2 identifiers
* @param funcs2 NULL terminated array of function pointers for functions which take 2 arguments
* @param log_ctx parent logging context
* @return 0 in case of success, a negative value corresponding to an
* AVERROR code otherwise
*/
int av_expr_parse(AVExpr **expr, const char *s,
const char * const *const_names,
const char * const *func1_names, double (* const *funcs1)(void *, double),
const char * const *func2_names, double (* const *funcs2)(void *, double, double),
int log_offset, void *log_ctx);
/**
* Evaluate a previously parsed expression.
*
* @param const_values a zero terminated array of values for the identifiers from av_expr_parse() const_names
* @param opaque a pointer which will be passed to all functions from funcs1 and funcs2
* @return the value of the expression
*/
double av_expr_eval(AVExpr *e, const double *const_values, void *opaque);
/**
* Free a parsed expression previously created with av_expr_parse().
*/
void av_expr_free(AVExpr *e);
#if FF_API_OLD_EVAL_NAMES
/**
* @deprecated Deprecated in favor of av_expr_parse_and_eval().
*/
attribute_deprecated
int av_parse_and_eval_expr(double *res, const char *s,
const char * const *const_names, const double *const_values,
const char * const *func1_names, double (* const *funcs1)(void *, double),
const char * const *func2_names, double (* const *funcs2)(void *, double, double),
void *opaque, int log_offset, void *log_ctx);
/**
* @deprecated Deprecated in favor of av_expr_parse().
*/
attribute_deprecated
int av_parse_expr(AVExpr **expr, const char *s,
const char * const *const_names,
const char * const *func1_names, double (* const *funcs1)(void *, double),
const char * const *func2_names, double (* const *funcs2)(void *, double, double),
int log_offset, void *log_ctx);
/**
* @deprecated Deprecated in favor of av_expr_eval().
*/
attribute_deprecated
double av_eval_expr(AVExpr *e, const double *const_values, void *opaque);
/**
* @deprecated Deprecated in favor of av_expr_free().
*/
attribute_deprecated
void av_free_expr(AVExpr *e);
#endif /* FF_API_OLD_EVAL_NAMES */
/**
* Parse the string in numstr and return its value as a double. If
* the string is empty, contains only whitespaces, or does not contain
* an initial substring that has the expected syntax for a
* floating-point number, no conversion is performed. In this case,
* returns a value of zero and the value returned in tail is the value
* of numstr.
*
* @param numstr a string representing a number, may contain one of
* the International System number postfixes, for example 'K', 'M',
* 'G'. If 'i' is appended after the postfix, powers of 2 are used
 * instead of powers of 10. The 'B' postfix multiplies the value by
 * 8, and can be appended after another postfix or used alone. This
 * allows using for example 'KB', 'MiB', 'G' and 'B' as postfixes.
* @param tail if non-NULL puts here the pointer to the char next
* after the last parsed character
*/
double av_strtod(const char *numstr, char **tail);
#endif /* AVUTIL_EVAL_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* a very simple circular buffer FIFO implementation
*/
#ifndef AVUTIL_FIFO_H
#define AVUTIL_FIFO_H
#include <stdint.h>
typedef struct AVFifoBuffer {
    uint8_t *buffer;            ///< start of the allocated buffer
    uint8_t *rptr, *wptr, *end; ///< read pointer, write pointer, first byte past the buffer (wrap point, see av_fifo_peek())
    uint32_t rndx, wndx;        ///< NOTE(review): presumably cumulative read/write byte counters -- not used in this header, confirm in fifo.c
} AVFifoBuffer;
/**
* Initialize an AVFifoBuffer.
* @param size of FIFO
* @return AVFifoBuffer or NULL in case of memory allocation failure
*/
AVFifoBuffer *av_fifo_alloc(unsigned int size);
/**
* Free an AVFifoBuffer.
* @param *f AVFifoBuffer to free
*/
void av_fifo_free(AVFifoBuffer *f);
/**
* Reset the AVFifoBuffer to the state right after av_fifo_alloc, in particular it is emptied.
* @param *f AVFifoBuffer to reset
*/
void av_fifo_reset(AVFifoBuffer *f);
/**
* Return the amount of data in bytes in the AVFifoBuffer, that is the
* amount of data you can read from it.
* @param *f AVFifoBuffer to read from
* @return size
*/
int av_fifo_size(AVFifoBuffer *f);
/**
* Return the amount of space in bytes in the AVFifoBuffer, that is the
* amount of data you can write into it.
* @param *f AVFifoBuffer to write into
* @return size
*/
int av_fifo_space(AVFifoBuffer *f);
/**
* Feed data from an AVFifoBuffer to a user-supplied callback.
* @param *f AVFifoBuffer to read from
* @param buf_size number of bytes to read
* @param *func generic read function
* @param *dest data destination
*/
int av_fifo_generic_read(AVFifoBuffer *f, void *dest, int buf_size, void (*func)(void*, void*, int));
/**
* Feed data from a user-supplied callback to an AVFifoBuffer.
* @param *f AVFifoBuffer to write to
* @param *src data source; non-const since it may be used as a
* modifiable context by the function defined in func
* @param size number of bytes to write
* @param *func generic write function; the first parameter is src,
* the second is dest_buf, the third is dest_buf_size.
* func must return the number of bytes written to dest_buf, or <= 0 to
* indicate no more data available to write.
* If func is NULL, src is interpreted as a simple byte array for source data.
* @return the number of bytes written to the FIFO
*/
int av_fifo_generic_write(AVFifoBuffer *f, void *src, int size, int (*func)(void*, void*, int));
/**
* Resize an AVFifoBuffer.
* @param *f AVFifoBuffer to resize
* @param size new AVFifoBuffer size in bytes
* @return <0 for failure, >=0 otherwise
*/
int av_fifo_realloc2(AVFifoBuffer *f, unsigned int size);
/**
* Read and discard the specified amount of data from an AVFifoBuffer.
* @param *f AVFifoBuffer to read from
* @param size amount of data to read in bytes
*/
void av_fifo_drain(AVFifoBuffer *f, int size);
/**
 * Return the byte at offset offs from the current read position,
 * wrapping around the physical end of the circular buffer if needed.
 * No bounds checking is performed.
 */
static inline uint8_t av_fifo_peek(AVFifoBuffer *f, int offs)
{
    uint8_t *pos = f->rptr + offs;
    /* wrap once past the end of the allocation */
    if (pos >= f->end)
        pos = f->buffer + (pos - f->end);
    return *pos;
}
#endif /* AVUTIL_FIFO_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_FILE_H
#define AVUTIL_FILE_H
#include "avutil.h"
/**
 * @file
 * misc file utilities
 */
/**
* Read the file with name filename, and put its content in a newly
* allocated buffer or map it with mmap() when available.
* In case of success set *bufptr to the read or mmapped buffer, and
* *size to the size in bytes of the buffer in *bufptr.
* The returned buffer must be released with av_file_unmap().
*
* @param log_offset loglevel offset used for logging
* @param log_ctx context used for logging
* @return a non negative number in case of success, a negative value
* corresponding to an AVERROR error code in case of failure
*/
int av_file_map(const char *filename, uint8_t **bufptr, size_t *size,
int log_offset, void *log_ctx);
/**
* Unmap or free the buffer bufptr created by av_file_map().
*
* @param size size in bytes of bufptr, must be the same as returned
* by av_file_map()
*/
void av_file_unmap(uint8_t *bufptr, size_t size);
#endif /* AVUTIL_FILE_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_IMGUTILS_H
#define AVUTIL_IMGUTILS_H
/**
* @file
* misc image utilities
*/
#include "avutil.h"
#include "pixdesc.h"
/**
* Compute the max pixel step for each plane of an image with a
* format described by pixdesc.
*
* The pixel step is the distance in bytes between the first byte of
* the group of bytes which describe a pixel component and the first
* byte of the successive group in the same plane for the same
* component.
*
* @param max_pixsteps an array which is filled with the max pixel step
* for each plane. Since a plane may contain different pixel
* components, the computed max_pixsteps[plane] is relative to the
* component in the plane with the max pixel step.
* @param max_pixstep_comps an array which is filled with the component
* for each plane which has the max pixel step. May be NULL.
*/
void av_image_fill_max_pixsteps(int max_pixsteps[4], int max_pixstep_comps[4],
const AVPixFmtDescriptor *pixdesc);
/**
* Compute the size of an image line with format pix_fmt and width
* width for the plane plane.
*
* @return the computed size in bytes
*/
int av_image_get_linesize(enum PixelFormat pix_fmt, int width, int plane);
/**
* Fill plane linesizes for an image with pixel format pix_fmt and
* width width.
*
* @param linesizes array to be filled with the linesize for each plane
* @return >= 0 in case of success, a negative error code otherwise
*/
int av_image_fill_linesizes(int linesizes[4], enum PixelFormat pix_fmt, int width);
/**
* Fill plane data pointers for an image with pixel format pix_fmt and
* height height.
*
* @param data pointers array to be filled with the pointer for each image plane
* @param ptr the pointer to a buffer which will contain the image
* @param linesizes[4] the array containing the linesize for each
* plane, should be filled by av_image_fill_linesizes()
* @return the size in bytes required for the image buffer, a negative
* error code in case of failure
*/
int av_image_fill_pointers(uint8_t *data[4], enum PixelFormat pix_fmt, int height,
uint8_t *ptr, const int linesizes[4]);
/**
* Allocate an image with size w and h and pixel format pix_fmt, and
* fill pointers and linesizes accordingly.
* The allocated image buffer has to be freed by using
* av_freep(&pointers[0]).
*
* @param align the value to use for buffer size alignment
* @return the size in bytes required for the image buffer, a negative
* error code in case of failure
*/
int av_image_alloc(uint8_t *pointers[4], int linesizes[4],
int w, int h, enum PixelFormat pix_fmt, int align);
/**
* Copy image plane from src to dst.
* That is, copy "height" number of lines of "bytewidth" bytes each.
* The first byte of each successive line is separated by *_linesize
* bytes.
*
* @param dst_linesize linesize for the image plane in dst
* @param src_linesize linesize for the image plane in src
*/
void av_image_copy_plane(uint8_t *dst, int dst_linesize,
const uint8_t *src, int src_linesize,
int bytewidth, int height);
/**
* Copy image in src_data to dst_data.
*
* @param dst_linesize linesizes for the image in dst_data
* @param src_linesize linesizes for the image in src_data
*/
void av_image_copy(uint8_t *dst_data[4], int dst_linesizes[4],
const uint8_t *src_data[4], const int src_linesizes[4],
enum PixelFormat pix_fmt, int width, int height);
/**
* Check if the given dimension of an image is valid, meaning that all
* bytes of the image can be addressed with a signed int.
*
* @param w the width of the picture
* @param h the height of the picture
* @param log_offset the offset to sum to the log level for logging with log_ctx
* @param log_ctx the parent logging context, it may be NULL
* @return >= 0 if valid, a negative error code otherwise
*/
int av_image_check_size(unsigned int w, unsigned int h, int log_offset, void *log_ctx);
int ff_set_systematic_pal2(uint32_t pal[256], enum PixelFormat pix_fmt);
#endif /* AVUTIL_IMGUTILS_H */
/*
* copyright (c) 2005 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_INTFLOAT_READWRITE_H
#define AVUTIL_INTFLOAT_READWRITE_H
#include <stdint.h>
#include "attributes.h"
/* IEEE 80 bits extended float */
typedef struct AVExtFloat {
    uint8_t exponent[2]; ///< sign bit and 15-bit exponent -- NOTE(review): byte order assumed big-endian, confirm in intfloat_readwrite.c
    uint8_t mantissa[8]; ///< 64-bit mantissa
} AVExtFloat;
double av_int2dbl(int64_t v) av_const;
float av_int2flt(int32_t v) av_const;
double av_ext2dbl(const AVExtFloat ext) av_const;
int64_t av_dbl2int(double d) av_const;
int32_t av_flt2int(float d) av_const;
AVExtFloat av_dbl2ext(double d) av_const;
#endif /* AVUTIL_INTFLOAT_READWRITE_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_INTREADWRITE_H
#define AVUTIL_INTREADWRITE_H
#include <stdint.h>
#include "libavutil/avconfig.h"
#include "attributes.h"
#include "bswap.h"
/*
 * 64-, 32- and 16-bit unions used for type-punning loads and stores.
 * The av_alias attribute marks them as may_alias so that accessing
 * memory through them does not violate strict-aliasing rules.
 */
typedef union {
    uint64_t u64;
    uint32_t u32[2];
    uint16_t u16[4];
    uint8_t  u8 [8];
    double   f64;
    float    f32[2];
} av_alias av_alias64;
typedef union {
    uint32_t u32;
    uint16_t u16[2];
    uint8_t  u8 [4];
    float    f32;
} av_alias av_alias32;
typedef union {
    uint16_t u16;
    uint8_t  u8 [2];
} av_alias av_alias16;
/*
* Arch-specific headers can provide any combination of
* AV_[RW][BLN](16|24|32|64) and AV_(COPY|SWAP|ZERO)(64|128) macros.
* Preprocessor symbols must be defined, even if these are implemented
* as inline functions.
*/
#ifdef HAVE_AV_CONFIG_H
#include "config.h"
#if ARCH_ARM
# include "arm/intreadwrite.h"
#elif ARCH_AVR32
# include "avr32/intreadwrite.h"
#elif ARCH_MIPS
# include "mips/intreadwrite.h"
#elif ARCH_PPC
# include "ppc/intreadwrite.h"
#elif ARCH_TOMI
# include "tomi/intreadwrite.h"
#elif ARCH_X86
# include "x86/intreadwrite.h"
#endif
#endif /* HAVE_AV_CONFIG_H */
/*
* Map AV_RNXX <-> AV_R[BL]XX for all variants provided by per-arch headers.
*/
#if AV_HAVE_BIGENDIAN
# if defined(AV_RN16) && !defined(AV_RB16)
# define AV_RB16(p) AV_RN16(p)
# elif !defined(AV_RN16) && defined(AV_RB16)
# define AV_RN16(p) AV_RB16(p)
# endif
# if defined(AV_WN16) && !defined(AV_WB16)
# define AV_WB16(p, v) AV_WN16(p, v)
# elif !defined(AV_WN16) && defined(AV_WB16)
# define AV_WN16(p, v) AV_WB16(p, v)
# endif
# if defined(AV_RN24) && !defined(AV_RB24)
# define AV_RB24(p) AV_RN24(p)
# elif !defined(AV_RN24) && defined(AV_RB24)
# define AV_RN24(p) AV_RB24(p)
# endif
# if defined(AV_WN24) && !defined(AV_WB24)
# define AV_WB24(p, v) AV_WN24(p, v)
# elif !defined(AV_WN24) && defined(AV_WB24)
# define AV_WN24(p, v) AV_WB24(p, v)
# endif
# if defined(AV_RN32) && !defined(AV_RB32)
# define AV_RB32(p) AV_RN32(p)
# elif !defined(AV_RN32) && defined(AV_RB32)
# define AV_RN32(p) AV_RB32(p)
# endif
# if defined(AV_WN32) && !defined(AV_WB32)
# define AV_WB32(p, v) AV_WN32(p, v)
# elif !defined(AV_WN32) && defined(AV_WB32)
# define AV_WN32(p, v) AV_WB32(p, v)
# endif
# if defined(AV_RN64) && !defined(AV_RB64)
# define AV_RB64(p) AV_RN64(p)
# elif !defined(AV_RN64) && defined(AV_RB64)
# define AV_RN64(p) AV_RB64(p)
# endif
# if defined(AV_WN64) && !defined(AV_WB64)
# define AV_WB64(p, v) AV_WN64(p, v)
# elif !defined(AV_WN64) && defined(AV_WB64)
# define AV_WN64(p, v) AV_WB64(p, v)
# endif
#else /* AV_HAVE_BIGENDIAN */
# if defined(AV_RN16) && !defined(AV_RL16)
# define AV_RL16(p) AV_RN16(p)
# elif !defined(AV_RN16) && defined(AV_RL16)
# define AV_RN16(p) AV_RL16(p)
# endif
# if defined(AV_WN16) && !defined(AV_WL16)
# define AV_WL16(p, v) AV_WN16(p, v)
# elif !defined(AV_WN16) && defined(AV_WL16)
# define AV_WN16(p, v) AV_WL16(p, v)
# endif
# if defined(AV_RN24) && !defined(AV_RL24)
# define AV_RL24(p) AV_RN24(p)
# elif !defined(AV_RN24) && defined(AV_RL24)
# define AV_RN24(p) AV_RL24(p)
# endif
# if defined(AV_WN24) && !defined(AV_WL24)
# define AV_WL24(p, v) AV_WN24(p, v)
# elif !defined(AV_WN24) && defined(AV_WL24)
# define AV_WN24(p, v) AV_WL24(p, v)
# endif
# if defined(AV_RN32) && !defined(AV_RL32)
# define AV_RL32(p) AV_RN32(p)
# elif !defined(AV_RN32) && defined(AV_RL32)
# define AV_RN32(p) AV_RL32(p)
# endif
# if defined(AV_WN32) && !defined(AV_WL32)
# define AV_WL32(p, v) AV_WN32(p, v)
# elif !defined(AV_WN32) && defined(AV_WL32)
# define AV_WN32(p, v) AV_WL32(p, v)
# endif
# if defined(AV_RN64) && !defined(AV_RL64)
# define AV_RL64(p) AV_RN64(p)
# elif !defined(AV_RN64) && defined(AV_RL64)
# define AV_RN64(p) AV_RL64(p)
# endif
# if defined(AV_WN64) && !defined(AV_WL64)
# define AV_WL64(p, v) AV_WN64(p, v)
# elif !defined(AV_WN64) && defined(AV_WL64)
# define AV_WN64(p, v) AV_WL64(p, v)
# endif
#endif /* !AV_HAVE_BIGENDIAN */
/*
* Define AV_[RW]N helper macros to simplify definitions not provided
* by per-arch headers.
*/
#if defined(__GNUC__) && !defined(__TI_COMPILER_VERSION__)
union unaligned_64 { uint64_t l; } __attribute__((packed)) av_alias;
union unaligned_32 { uint32_t l; } __attribute__((packed)) av_alias;
union unaligned_16 { uint16_t l; } __attribute__((packed)) av_alias;
# define AV_RN(s, p) (((const union unaligned_##s *) (p))->l)
# define AV_WN(s, p, v) ((((union unaligned_##s *) (p))->l) = (v))
#elif defined(__DECC)
# define AV_RN(s, p) (*((const __unaligned uint##s##_t*)(p)))
# define AV_WN(s, p, v) (*((__unaligned uint##s##_t*)(p)) = (v))
#elif AV_HAVE_FAST_UNALIGNED
# define AV_RN(s, p) (((const av_alias##s*)(p))->u##s)
# define AV_WN(s, p, v) (((av_alias##s*)(p))->u##s = (v))
#else
#ifndef AV_RB16
# define AV_RB16(x) \
((((const uint8_t*)(x))[0] << 8) | \
((const uint8_t*)(x))[1])
#endif
#ifndef AV_WB16
# define AV_WB16(p, d) do { \
((uint8_t*)(p))[1] = (d); \
((uint8_t*)(p))[0] = (d)>>8; \
} while(0)
#endif
#ifndef AV_RL16
# define AV_RL16(x) \
((((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL16
# define AV_WL16(p, d) do { \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
} while(0)
#endif
#ifndef AV_RB32
# define AV_RB32(x) \
(((uint32_t)((const uint8_t*)(x))[0] << 24) | \
(((const uint8_t*)(x))[1] << 16) | \
(((const uint8_t*)(x))[2] << 8) | \
((const uint8_t*)(x))[3])
#endif
#ifndef AV_WB32
# define AV_WB32(p, d) do { \
((uint8_t*)(p))[3] = (d); \
((uint8_t*)(p))[2] = (d)>>8; \
((uint8_t*)(p))[1] = (d)>>16; \
((uint8_t*)(p))[0] = (d)>>24; \
} while(0)
#endif
#ifndef AV_RL32
# define AV_RL32(x) \
(((uint32_t)((const uint8_t*)(x))[3] << 24) | \
(((const uint8_t*)(x))[2] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL32
# define AV_WL32(p, d) do { \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
((uint8_t*)(p))[3] = (d)>>24; \
} while(0)
#endif
#ifndef AV_RB64
# define AV_RB64(x) \
(((uint64_t)((const uint8_t*)(x))[0] << 56) | \
((uint64_t)((const uint8_t*)(x))[1] << 48) | \
((uint64_t)((const uint8_t*)(x))[2] << 40) | \
((uint64_t)((const uint8_t*)(x))[3] << 32) | \
((uint64_t)((const uint8_t*)(x))[4] << 24) | \
((uint64_t)((const uint8_t*)(x))[5] << 16) | \
((uint64_t)((const uint8_t*)(x))[6] << 8) | \
(uint64_t)((const uint8_t*)(x))[7])
#endif
#ifndef AV_WB64
# define AV_WB64(p, d) do { \
((uint8_t*)(p))[7] = (d); \
((uint8_t*)(p))[6] = (d)>>8; \
((uint8_t*)(p))[5] = (d)>>16; \
((uint8_t*)(p))[4] = (d)>>24; \
((uint8_t*)(p))[3] = (d)>>32; \
((uint8_t*)(p))[2] = (d)>>40; \
((uint8_t*)(p))[1] = (d)>>48; \
((uint8_t*)(p))[0] = (d)>>56; \
} while(0)
#endif
#ifndef AV_RL64
# define AV_RL64(x) \
(((uint64_t)((const uint8_t*)(x))[7] << 56) | \
((uint64_t)((const uint8_t*)(x))[6] << 48) | \
((uint64_t)((const uint8_t*)(x))[5] << 40) | \
((uint64_t)((const uint8_t*)(x))[4] << 32) | \
((uint64_t)((const uint8_t*)(x))[3] << 24) | \
((uint64_t)((const uint8_t*)(x))[2] << 16) | \
((uint64_t)((const uint8_t*)(x))[1] << 8) | \
(uint64_t)((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL64
# define AV_WL64(p, d) do { \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
((uint8_t*)(p))[3] = (d)>>24; \
((uint8_t*)(p))[4] = (d)>>32; \
((uint8_t*)(p))[5] = (d)>>40; \
((uint8_t*)(p))[6] = (d)>>48; \
((uint8_t*)(p))[7] = (d)>>56; \
} while(0)
#endif
#if AV_HAVE_BIGENDIAN
# define AV_RN(s, p) AV_RB##s(p)
# define AV_WN(s, p, v) AV_WB##s(p, v)
#else
# define AV_RN(s, p) AV_RL##s(p)
# define AV_WN(s, p, v) AV_WL##s(p, v)
#endif
#endif /* !AV_HAVE_FAST_UNALIGNED */
#ifndef AV_RN16
# define AV_RN16(p) AV_RN(16, p)
#endif
#ifndef AV_RN32
# define AV_RN32(p) AV_RN(32, p)
#endif
#ifndef AV_RN64
# define AV_RN64(p) AV_RN(64, p)
#endif
#ifndef AV_WN16
# define AV_WN16(p, v) AV_WN(16, p, v)
#endif
#ifndef AV_WN32
# define AV_WN32(p, v) AV_WN(32, p, v)
#endif
#ifndef AV_WN64
# define AV_WN64(p, v) AV_WN(64, p, v)
#endif
#if AV_HAVE_BIGENDIAN
# define AV_RB(s, p) AV_RN##s(p)
# define AV_WB(s, p, v) AV_WN##s(p, v)
# define AV_RL(s, p) av_bswap##s(AV_RN##s(p))
# define AV_WL(s, p, v) AV_WN##s(p, av_bswap##s(v))
#else
# define AV_RB(s, p) av_bswap##s(AV_RN##s(p))
# define AV_WB(s, p, v) AV_WN##s(p, av_bswap##s(v))
# define AV_RL(s, p) AV_RN##s(p)
# define AV_WL(s, p, v) AV_WN##s(p, v)
#endif
#define AV_RB8(x) (((const uint8_t*)(x))[0])
#define AV_WB8(p, d) do { ((uint8_t*)(p))[0] = (d); } while(0)
#define AV_RL8(x) AV_RB8(x)
#define AV_WL8(p, d) AV_WB8(p, d)
#ifndef AV_RB16
# define AV_RB16(p) AV_RB(16, p)
#endif
#ifndef AV_WB16
# define AV_WB16(p, v) AV_WB(16, p, v)
#endif
#ifndef AV_RL16
# define AV_RL16(p) AV_RL(16, p)
#endif
#ifndef AV_WL16
# define AV_WL16(p, v) AV_WL(16, p, v)
#endif
#ifndef AV_RB32
# define AV_RB32(p) AV_RB(32, p)
#endif
#ifndef AV_WB32
# define AV_WB32(p, v) AV_WB(32, p, v)
#endif
#ifndef AV_RL32
# define AV_RL32(p) AV_RL(32, p)
#endif
#ifndef AV_WL32
# define AV_WL32(p, v) AV_WL(32, p, v)
#endif
#ifndef AV_RB64
# define AV_RB64(p) AV_RB(64, p)
#endif
#ifndef AV_WB64
# define AV_WB64(p, v) AV_WB(64, p, v)
#endif
#ifndef AV_RL64
# define AV_RL64(p) AV_RL(64, p)
#endif
#ifndef AV_WL64
# define AV_WL64(p, v) AV_WL(64, p, v)
#endif
#ifndef AV_RB24
# define AV_RB24(x) \
((((const uint8_t*)(x))[0] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[2])
#endif
#ifndef AV_WB24
# define AV_WB24(p, d) do { \
((uint8_t*)(p))[2] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[0] = (d)>>16; \
} while(0)
#endif
#ifndef AV_RL24
# define AV_RL24(x) \
((((const uint8_t*)(x))[2] << 16) | \
(((const uint8_t*)(x))[1] << 8) | \
((const uint8_t*)(x))[0])
#endif
#ifndef AV_WL24
# define AV_WL24(p, d) do { \
((uint8_t*)(p))[0] = (d); \
((uint8_t*)(p))[1] = (d)>>8; \
((uint8_t*)(p))[2] = (d)>>16; \
} while(0)
#endif
/*
* The AV_[RW]NA macros access naturally aligned data
* in a type-safe way.
*/
#define AV_RNA(s, p) (((const av_alias##s*)(p))->u##s)
#define AV_WNA(s, p, v) (((av_alias##s*)(p))->u##s = (v))
#ifndef AV_RN16A
# define AV_RN16A(p) AV_RNA(16, p)
#endif
#ifndef AV_RN32A
# define AV_RN32A(p) AV_RNA(32, p)
#endif
#ifndef AV_RN64A
# define AV_RN64A(p) AV_RNA(64, p)
#endif
#ifndef AV_WN16A
# define AV_WN16A(p, v) AV_WNA(16, p, v)
#endif
#ifndef AV_WN32A
# define AV_WN32A(p, v) AV_WNA(32, p, v)
#endif
#ifndef AV_WN64A
# define AV_WN64A(p, v) AV_WNA(64, p, v)
#endif
/* Parameters for AV_COPY*, AV_SWAP*, AV_ZERO* must be
* naturally aligned. They may be implemented using MMX,
* so emms_c() must be called before using any float code
* afterwards.
*/
#define AV_COPY(n, d, s) \
(((av_alias##n*)(d))->u##n = ((const av_alias##n*)(s))->u##n)
#ifndef AV_COPY16
# define AV_COPY16(d, s) AV_COPY(16, d, s)
#endif
#ifndef AV_COPY32
# define AV_COPY32(d, s) AV_COPY(32, d, s)
#endif
#ifndef AV_COPY64
# define AV_COPY64(d, s) AV_COPY(64, d, s)
#endif
#ifndef AV_COPY128
# define AV_COPY128(d, s) \
do { \
AV_COPY64(d, s); \
AV_COPY64((char*)(d)+8, (char*)(s)+8); \
} while(0)
#endif
#define AV_SWAP(n, a, b) FFSWAP(av_alias##n, *(av_alias##n*)(a), *(av_alias##n*)(b))
#ifndef AV_SWAP64
# define AV_SWAP64(a, b) AV_SWAP(64, a, b)
#endif
#define AV_ZERO(n, d) (((av_alias##n*)(d))->u##n = 0)
#ifndef AV_ZERO16
# define AV_ZERO16(d) AV_ZERO(16, d)
#endif
#ifndef AV_ZERO32
# define AV_ZERO32(d) AV_ZERO(32, d)
#endif
#ifndef AV_ZERO64
# define AV_ZERO64(d) AV_ZERO(64, d)
#endif
#ifndef AV_ZERO128
# define AV_ZERO128(d) \
do { \
AV_ZERO64(d); \
AV_ZERO64((char*)(d)+8); \
} while(0)
#endif
#endif /* AVUTIL_INTREADWRITE_H */
/*
* Lagged Fibonacci PRNG
* Copyright (c) 2008 Michael Niedermayer
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_LFG_H
#define AVUTIL_LFG_H
/*
 * Context for the lagged Fibonacci generators below
 * (see av_lfg_get() and av_mlfg_get()).
 */
typedef struct {
    unsigned int state[64]; ///< ring buffer of the last 64 outputs; lags 24 and 55 are combined
    int index;              ///< position of the next output, always masked with & 63
} AVLFG;
void av_lfg_init(AVLFG *c, unsigned int seed);
/**
* Get the next random unsigned 32-bit number using an ALFG.
*
* Please also consider a simple LCG like state= state*1664525+1013904223,
* it may be good enough and faster for your specific use case.
*/
static inline unsigned int av_lfg_get(AVLFG *c){
    unsigned int i = c->index;
    /* x[n] = x[n-24] + x[n-55] (mod 2^32), kept in a 64-entry ring */
    unsigned int next = c->state[(i - 24) & 63] + c->state[(i - 55) & 63];
    c->state[i & 63] = next;
    c->index = i + 1;
    return next;
}
/**
* Get the next random unsigned 32-bit number using a MLFG.
*
* Please also consider av_lfg_get() above, it is faster.
*/
static inline unsigned int av_mlfg_get(AVLFG *c){
    unsigned int i = c->index;
    unsigned int x = c->state[(i - 55) & 63];
    unsigned int y = c->state[(i - 24) & 63];
    /* multiplicative combination of the two lagged values (mod 2^32) */
    unsigned int next = 2 * x * y + x + y;
    c->state[i & 63] = next;
    c->index = i + 1;
    return next;
}
/**
* Get the next two numbers generated by a Box-Muller Gaussian
* generator using the random numbers issued by lfg.
*
* @param out[2] array where the two generated numbers are placed
*/
void av_bmg_get(AVLFG *lfg, double out[2]);
#endif /* AVUTIL_LFG_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_LOG_H
#define AVUTIL_LOG_H
#include <stdarg.h>
#include "avutil.h"
/**
* Describe the class of an AVClass context structure. That is an
* arbitrary struct of which the first field is a pointer to an
* AVClass struct (e.g. AVCodecContext, AVFormatContext etc.).
*/
typedef struct {
    /**
     * The name of the class; usually it is the same name as the
     * context structure type to which the AVClass is associated.
     */
    const char* class_name;
    /**
     * A pointer to a function which returns the name of a context
     * instance ctx associated with the class.
     */
    const char* (*item_name)(void* ctx);
    /**
     * a pointer to the first option specified in the class if any or NULL
     *
     * @see av_set_default_options()
     */
    const struct AVOption *option;
    /**
     * LIBAVUTIL_VERSION with which this structure was created.
     * This is used to allow fields to be added without requiring major
     * version bumps everywhere.
     */
    int version;
    /**
     * Offset in the structure where log_level_offset is stored.
     * 0 means there is no such variable
     */
    int log_level_offset_offset;
    /**
     * Offset in the structure where a pointer to the parent context for
     * logging is stored. For example a decoder that uses eval.c could
     * pass its AVCodecContext to eval as such a parent context, and an
     * av_log() implementation could then display the parent context.
     * The parent context may of course be NULL.
     */
    int parent_log_context_offset;
    /**
     * A function for extended searching, e.g. in possible
     * children objects.
     */
    const struct AVOption* (*opt_find)(void *obj, const char *name, const char *unit,
                                       int opt_flags, int search_flags);
} AVClass;
/* av_log API */
#define AV_LOG_QUIET -8
/**
* Something went really wrong and we will crash now.
*/
#define AV_LOG_PANIC 0
/**
* Something went wrong and recovery is not possible.
* For example, no header was found for a format which depends
* on headers or an illegal combination of parameters is used.
*/
#define AV_LOG_FATAL 8
/**
* Something went wrong and cannot losslessly be recovered.
* However, not all future data is affected.
*/
#define AV_LOG_ERROR 16
/**
* Something somehow does not look correct. This may or may not
* lead to problems. An example would be the use of '-vstrict -2'.
*/
#define AV_LOG_WARNING 24
#define AV_LOG_INFO 32
#define AV_LOG_VERBOSE 40
/**
* Stuff which is only useful for libav* developers.
*/
#define AV_LOG_DEBUG 48
/**
* Send the specified message to the log if the level is less than or equal
* to the current av_log_level. By default, all logging messages are sent to
* stderr. This behavior can be altered by setting a different av_vlog callback
* function.
*
* @param avcl A pointer to an arbitrary struct of which the first field is a
* pointer to an AVClass struct.
* @param level The importance level of the message, lower values signifying
* higher importance.
* @param fmt The format string (printf-compatible) that specifies how
* subsequent arguments are converted to output.
* @see av_vlog
*/
#ifdef __GNUC__
void av_log(void *avcl, int level, const char *fmt, ...) __attribute__ ((__format__ (__printf__, 3, 4)));
#else
void av_log(void *avcl, int level, const char *fmt, ...);
#endif
void av_vlog(void *avcl, int level, const char *fmt, va_list);
int av_log_get_level(void);
void av_log_set_level(int);
void av_log_set_callback(void (*)(void*, int, const char*, va_list));
void av_log_default_callback(void* ptr, int level, const char* fmt, va_list vl);
const char* av_default_item_name(void* ctx);
/**
* av_dlog macros
* Useful to print debug messages that shouldn't get compiled in normally.
*/
#ifdef DEBUG
# define av_dlog(pctx, ...) av_log(pctx, AV_LOG_DEBUG, __VA_ARGS__)
#else
# define av_dlog(pctx, ...) do { if (0) av_log(pctx, AV_LOG_DEBUG, __VA_ARGS__); } while (0)
#endif
/**
* Skip repeated messages, this requires the user app to use av_log() instead of
* (f)printf as the 2 would otherwise interfere and lead to
* "Last message repeated x times" messages below (f)printf messages with some
* bad luck.
* Also to receive the last, "last repeated" line if any, the user app must
* call av_log(NULL, AV_LOG_QUIET, "%s", ""); at the end
*/
#define AV_LOG_SKIP_REPEATED 1
void av_log_set_flags(int arg);
#endif /* AVUTIL_LOG_H */
/*
* LZO 1x decompression
* copyright (c) 2006 Reimar Doeffinger
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_LZO_H
#define AVUTIL_LZO_H
#include <stdint.h>
/** \defgroup errflags Error flags returned by av_lzo1x_decode
* \{ */
//! end of the input buffer reached before decoding finished
#define AV_LZO_INPUT_DEPLETED 1
//! decoded data did not fit into output buffer
#define AV_LZO_OUTPUT_FULL 2
//! a reference to previously decoded data was wrong
#define AV_LZO_INVALID_BACKPTR 4
//! a non-specific error in the compressed bitstream
#define AV_LZO_ERROR 8
/** \} */
/** Number of extra padding bytes the input buffer must provide (see av_lzo1x_decode()). */
#define AV_LZO_INPUT_PADDING 8
/** Number of extra padding bytes the output buffer must provide (see av_lzo1x_decode()). */
#define AV_LZO_OUTPUT_PADDING 12
/**
* \brief Decodes LZO 1x compressed data.
* \param out output buffer
* \param outlen size of output buffer, number of bytes left are returned here
* \param in input buffer
* \param inlen size of input buffer, number of bytes left are returned here
* \return 0 on success, otherwise a combination of the error flags above
*
* Make sure all buffers are appropriately padded, in must provide
* AV_LZO_INPUT_PADDING, out must provide AV_LZO_OUTPUT_PADDING additional bytes.
*/
int av_lzo1x_decode(void *out, int *outlen, const void *in, int *inlen);
/**
* \brief deliberately overlapping memcpy implementation
* \param dst destination buffer; must be padded with 12 additional bytes
* \param back how many bytes back we start (the initial size of the overlapping window)
* \param cnt number of bytes to copy, must be >= 0
*
* cnt > back is valid, this will copy the bytes we just copied,
* thus creating a repeating pattern with a period length of back.
*/
void av_memcpy_backptr(uint8_t *dst, int back, int cnt);
#endif /* AVUTIL_LZO_H */
/*
* copyright (c) 2005 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_MATHEMATICS_H
#define AVUTIL_MATHEMATICS_H
#include <stdint.h>
#include <math.h>
#include "attributes.h"
#include "rational.h"
#ifndef M_E
#define M_E 2.7182818284590452354 /* e */
#endif
#ifndef M_LN2
#define M_LN2 0.69314718055994530942 /* log_e 2 */
#endif
#ifndef M_LN10
#define M_LN10 2.30258509299404568402 /* log_e 10 */
#endif
#ifndef M_LOG2_10
#define M_LOG2_10 3.32192809488736234787 /* log_2 10 */
#endif
#ifndef M_PHI
#define M_PHI 1.61803398874989484820 /* phi / golden ratio */
#endif
#ifndef M_PI
#define M_PI 3.14159265358979323846 /* pi */
#endif
#ifndef M_SQRT1_2
#define M_SQRT1_2 0.70710678118654752440 /* 1/sqrt(2) */
#endif
#ifndef M_SQRT2
#define M_SQRT2 1.41421356237309504880 /* sqrt(2) */
#endif
#ifndef NAN
#define NAN (0.0/0.0)
#endif
#ifndef INFINITY
#define INFINITY (1.0/0.0)
#endif
/**
 * Rounding methods, used e.g. by av_rescale_rnd().
 */
enum AVRounding {
    AV_ROUND_ZERO = 0, ///< Round toward zero.
    AV_ROUND_INF = 1, ///< Round away from zero.
    AV_ROUND_DOWN = 2, ///< Round toward -infinity.
    AV_ROUND_UP = 3, ///< Round toward +infinity.
    AV_ROUND_NEAR_INF = 5, ///< Round to nearest and halfway cases away from zero.
};
/**
* Return the greatest common divisor of a and b.
* If both a and b are 0 or either or both are <0 then behavior is
* undefined.
*/
int64_t av_const av_gcd(int64_t a, int64_t b);
/**
* Rescale a 64-bit integer with rounding to nearest.
* A simple a*b/c isn't possible as it can overflow.
*/
int64_t av_rescale(int64_t a, int64_t b, int64_t c) av_const;
/**
* Rescale a 64-bit integer with specified rounding.
* A simple a*b/c isn't possible as it can overflow.
*/
int64_t av_rescale_rnd(int64_t a, int64_t b, int64_t c, enum AVRounding) av_const;
/**
* Rescale a 64-bit integer by 2 rational numbers.
*/
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq) av_const;
/**
* Compare 2 timestamps each in its own timebases.
* The result of the function is undefined if one of the timestamps
* is outside the int64_t range when represented in the others timebase.
* @return -1 if ts_a is before ts_b, 1 if ts_a is after ts_b or 0 if they represent the same position
*/
int av_compare_ts(int64_t ts_a, AVRational tb_a, int64_t ts_b, AVRational tb_b);
/**
* Compare 2 integers modulo mod.
* That is we compare integers a and b for which only the least
* significant log2(mod) bits are known.
*
* @param mod must be a power of 2
* @return a negative value if a is smaller than b
* a positive value if a is greater than b
* 0 if a equals b
*/
int64_t av_compare_mod(uint64_t a, uint64_t b, uint64_t mod);
#endif /* AVUTIL_MATHEMATICS_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_MD5_H
#define AVUTIL_MD5_H
#include <stdint.h>
extern const int av_md5_size; ///< size of the MD5 context struct, for allocation — TODO confirm against md5.c
struct AVMD5; ///< opaque MD5 hashing context
/** Initialize an MD5 hashing context. */
void av_md5_init(struct AVMD5 *ctx);
/** Feed len bytes from src into the running MD5 computation. */
void av_md5_update(struct AVMD5 *ctx, const uint8_t *src, const int len);
/** Finish the computation and write the 16-byte MD5 digest to dst. */
void av_md5_final(struct AVMD5 *ctx, uint8_t *dst);
/** One-shot convenience: compute the MD5 digest of len bytes of src into dst. */
void av_md5_sum(uint8_t *dst, const uint8_t *src, const int len);
#endif /* AVUTIL_MD5_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* memory handling functions
*/
#ifndef AVUTIL_MEM_H
#define AVUTIL_MEM_H
#include "attributes.h"
#include "avutil.h"
/*
 * DECLARE_ALIGNED(n,t,v):  declare a variable v of type t, aligned to n bytes.
 * DECLARE_ASM_CONST(n,t,v): declare a static const variable v of type t,
 *                           aligned to n bytes (intended for use from asm).
 * One implementation is selected per compiler; the final branch declares the
 * variable without any alignment for compilers with no known alignment syntax.
 */
#if defined(__INTEL_COMPILER) && __INTEL_COMPILER < 1110 || defined(__SUNPRO_C)
    #define DECLARE_ALIGNED(n,t,v)      t __attribute__ ((aligned (n))) v
    #define DECLARE_ASM_CONST(n,t,v)    const t __attribute__ ((aligned (n))) v
#elif defined(__TI_COMPILER_VERSION__)
    /* TI compiler needs a data-alignment pragma in addition to the attribute. */
    #define DECLARE_ALIGNED(n,t,v)                      \
        AV_PRAGMA(DATA_ALIGN(v,n))                      \
        t __attribute__((aligned(n))) v
    #define DECLARE_ASM_CONST(n,t,v)                    \
        AV_PRAGMA(DATA_ALIGN(v,n))                      \
        static const t __attribute__((aligned(n))) v
#elif defined(__GNUC__)
    #define DECLARE_ALIGNED(n,t,v)      t __attribute__ ((aligned (n))) v
    #define DECLARE_ASM_CONST(n,t,v)    static const t av_used __attribute__ ((aligned (n))) v
#elif defined(_MSC_VER)
    #define DECLARE_ALIGNED(n,t,v)      __declspec(align(n)) t v
    #define DECLARE_ASM_CONST(n,t,v)    __declspec(align(n)) static const t v
#else
    #define DECLARE_ALIGNED(n,t,v)      t v
    #define DECLARE_ASM_CONST(n,t,v)    static const t v
#endif
/* Mark allocator functions with GCC's "malloc" attribute (returned pointer
 * does not alias existing pointers); no-op on compilers without it. */
#if AV_GCC_VERSION_AT_LEAST(3,1)
    #define av_malloc_attrib __attribute__((__malloc__))
#else
    #define av_malloc_attrib
#endif
/* Tell GCC (>= 4.3) which argument of an allocator carries the allocation
 * size, enabling better buffer-overflow diagnostics; no-op otherwise. */
#if AV_GCC_VERSION_AT_LEAST(4,3)
    #define av_alloc_size(n) __attribute__((alloc_size(n)))
#else
    #define av_alloc_size(n)
#endif
/**
* Allocate a block of size bytes with alignment suitable for all
* memory accesses (including vectors if available on the CPU).
* @param size Size in bytes for the memory block to be allocated.
* @return Pointer to the allocated block, NULL if the block cannot
* be allocated.
* @see av_mallocz()
*/
void *av_malloc(size_t size) av_malloc_attrib av_alloc_size(1);
/**
* Allocate or reallocate a block of memory.
* If ptr is NULL and size > 0, allocate a new block. If
* size is zero, free the memory block pointed to by ptr.
* @param size Size in bytes for the memory block to be allocated or
* reallocated.
* @param ptr Pointer to a memory block already allocated with
* av_malloc(z)() or av_realloc() or NULL.
* @return Pointer to a newly reallocated block or NULL if the block
* cannot be reallocated or the function is used to free the memory block.
* @see av_fast_realloc()
*/
void *av_realloc(void *ptr, size_t size) av_alloc_size(2);
/**
* Free a memory block which has been allocated with av_malloc(z)() or
* av_realloc().
* @param ptr Pointer to the memory block which should be freed.
* @note ptr = NULL is explicitly allowed.
* @note It is recommended that you use av_freep() instead.
* @see av_freep()
*/
void av_free(void *ptr);
/**
* Allocate a block of size bytes with alignment suitable for all
* memory accesses (including vectors if available on the CPU) and
* zero all the bytes of the block.
* @param size Size in bytes for the memory block to be allocated.
* @return Pointer to the allocated block, NULL if it cannot be allocated.
* @see av_malloc()
*/
void *av_mallocz(size_t size) av_malloc_attrib av_alloc_size(1);
/**
* Duplicate the string s.
* @param s string to be duplicated
* @return Pointer to a newly allocated string containing a
* copy of s or NULL if the string cannot be allocated.
*/
char *av_strdup(const char *s) av_malloc_attrib;
/**
* Free a memory block which has been allocated with av_malloc(z)() or
* av_realloc() and set the pointer pointing to it to NULL.
* @param ptr Pointer to the pointer to the memory block which should
* be freed.
* @see av_free()
*/
void av_freep(void *ptr);
/**
* Add an element to a dynamic array.
*
* @param tab_ptr Pointer to the array.
* @param nb_ptr Pointer to the number of elements in the array.
* @param elem Element to be added.
*/
void av_dynarray_add(void *tab_ptr, int *nb_ptr, void *elem);
#endif /* AVUTIL_MEM_H */
/*
* AVOptions
* copyright (c) 2005 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_OPT_H
#define AVUTIL_OPT_H
/**
* @file
* AVOptions
*/
#include "rational.h"
#include "avutil.h"
#include "dict.h"
/**
 * Type of the value stored by an AVOption.
 */
enum AVOptionType{
    FF_OPT_TYPE_FLAGS,
    FF_OPT_TYPE_INT,
    FF_OPT_TYPE_INT64,
    FF_OPT_TYPE_DOUBLE,
    FF_OPT_TYPE_FLOAT,
    FF_OPT_TYPE_STRING,
    FF_OPT_TYPE_RATIONAL,
    FF_OPT_TYPE_BINARY,  ///< offset must point to a pointer immediately followed by an int for the length
    FF_OPT_TYPE_CONST=128,
};
/**
* AVOption
*/
/**
 * AVOption: describes a single user-settable option of an AVClass context.
 */
typedef struct AVOption {
    const char *name; ///< option name used for lookup
    /**
     * short English help text
     * @todo What about other languages?
     */
    const char *help;
    /**
     * The offset relative to the context structure where the option
     * value is stored. It should be 0 for named constants.
     */
    int offset;
    enum AVOptionType type; ///< type of the stored value, selects the default_val member used
    /**
     * the default value for scalar options
     */
    union {
        double dbl;
        const char *str;
        /* TODO those are unused now */
        int64_t i64;
        AVRational q;
    } default_val;
    double min; ///< minimum valid value for the option
    double max; ///< maximum valid value for the option
    int flags; ///< a combination of the AV_OPT_FLAG_* values below
#define AV_OPT_FLAG_ENCODING_PARAM 1 ///< a generic parameter which can be set by the user for muxing or encoding
#define AV_OPT_FLAG_DECODING_PARAM 2 ///< a generic parameter which can be set by the user for demuxing or decoding
#define AV_OPT_FLAG_METADATA 4 ///< some data extracted or inserted into the file like title, comment, ...
#define AV_OPT_FLAG_AUDIO_PARAM 8 ///< option applies to audio (by analogy with the flags above)
#define AV_OPT_FLAG_VIDEO_PARAM 16 ///< option applies to video (by analogy with the flags above)
#define AV_OPT_FLAG_SUBTITLE_PARAM 32 ///< option applies to subtitles (by analogy with the flags above)
//FIXME think about enc-audio, ... style flags
    /**
     * The logical unit to which the option belongs. Non-constant
     * options and corresponding named constants share the same
     * unit. May be NULL.
     */
    const char *unit;
} AVOption;
#if FF_API_FIND_OPT
/**
* Look for an option in obj. Look only for the options which
* have the flags set as specified in mask and flags (that is,
* for which it is the case that opt->flags & mask == flags).
*
* @param[in] obj a pointer to a struct whose first element is a
* pointer to an AVClass
* @param[in] name the name of the option to look for
* @param[in] unit the unit of the option to look for, or any if NULL
* @return a pointer to the option found, or NULL if no option
* has been found
*
* @deprecated use av_opt_find.
*/
attribute_deprecated
const AVOption *av_find_opt(void *obj, const char *name, const char *unit, int mask, int flags);
#endif
/**
* Set the field of obj with the given name to value.
*
* @param[in] obj A struct whose first element is a pointer to an
* AVClass.
* @param[in] name the name of the field to set
* @param[in] val The value to set. If the field is not of a string
* type, then the given string is parsed.
* SI postfixes and some named scalars are supported.
* If the field is of a numeric type, it has to be a numeric or named
* scalar. Behavior with more than one scalar and +- infix operators
* is undefined.
* If the field is of a flags type, it has to be a sequence of numeric
* scalars or named flags separated by '+' or '-'. Prefixing a flag
* with '+' causes it to be set without affecting the other flags;
* similarly, '-' unsets a flag.
* @param[out] o_out if non-NULL put here a pointer to the AVOption
* found
* @param alloc when 1 then the old value will be av_freed() and the
* new av_strduped()
* when 0 then no av_free() nor av_strdup() will be used
* @return 0 if the value has been set, or an AVERROR code in case of
* error:
* AVERROR(ENOENT) if no matching option exists
* AVERROR(ERANGE) if the value is out of range
* AVERROR(EINVAL) if the value is not valid
*/
int av_set_string3(void *obj, const char *name, const char *val, int alloc, const AVOption **o_out);
/** Set the field of obj named name to the double value n; returns the matched option (NULL on failure — TODO confirm). */
const AVOption *av_set_double(void *obj, const char *name, double n);
/** Set the field of obj named name to the rational value n; returns the matched option (NULL on failure — TODO confirm). */
const AVOption *av_set_q(void *obj, const char *name, AVRational n);
/** Set the field of obj named name to the integer value n; returns the matched option (NULL on failure — TODO confirm). */
const AVOption *av_set_int(void *obj, const char *name, int64_t n);
/** Get the value of the field of obj named name as a double; the matched option is put in *o_out. */
double av_get_double(void *obj, const char *name, const AVOption **o_out);
/** Get the value of the field of obj named name as a rational; the matched option is put in *o_out. */
AVRational av_get_q(void *obj, const char *name, const AVOption **o_out);
/** Get the value of the field of obj named name as an integer; the matched option is put in *o_out. */
int64_t av_get_int(void *obj, const char *name, const AVOption **o_out);
/** Get the value of the field of obj named name as a string written into buf. */
const char *av_get_string(void *obj, const char *name, const AVOption **o_out, char *buf, int buf_len);
/** Iterate over the options of obj; pass NULL to get the first option, the previous return value thereafter. */
const AVOption *av_next_option(void *obj, const AVOption *last);
/**
 * Show the obj options.
 *
 * @param req_flags requested flags for the options to show. Show only the
 * options for which it is opt->flags & req_flags.
 * @param rej_flags rejected flags for the options to show. Show only the
 * options for which it is !(opt->flags & rej_flags).
 * @param av_log_obj log context to use for showing the options
 */
int av_opt_show2(void *obj, void *av_log_obj, int req_flags, int rej_flags);
/** Set the values of all AVOption fields of s to their default values. */
void av_opt_set_defaults(void *s);
/** Like av_opt_set_defaults(), but only for options matching mask/flags
 *  (presumably opt->flags & mask == flags, as in av_find_opt() — TODO confirm). */
void av_opt_set_defaults2(void *s, int mask, int flags);
/**
* Parse the key/value pairs list in opts. For each key/value pair
* found, stores the value in the field in ctx that is named like the
* key. ctx must be an AVClass context, storing is done using
* AVOptions.
*
* @param opts options string to parse, may be NULL
* @param key_val_sep a 0-terminated list of characters used to
* separate key from value
* @param pairs_sep a 0-terminated list of characters used to separate
* two pairs from each other
* @return the number of successfully set key/value pairs, or a negative
* value corresponding to an AVERROR code in case of error:
* AVERROR(EINVAL) if opts cannot be parsed,
* the error code issued by av_set_string3() if a key/value pair
* cannot be set
*/
int av_set_options_string(void *ctx, const char *opts,
const char *key_val_sep, const char *pairs_sep);
/**
* Free all string and binary options in obj.
*/
void av_opt_free(void *obj);
/**
* Check whether a particular flag is set in a flags field.
*
* @param field_name the name of the flag field option
* @param flag_name the name of the flag to check
* @return non-zero if the flag is set, zero if the flag isn't set,
* isn't of the right type, or the flags field doesn't exist.
*/
int av_opt_flag_is_set(void *obj, const char *field_name, const char *flag_name);
/**
 * Set all the options from a given dictionary on an object.
 *
 * @param obj a struct whose first element is a pointer to AVClass
 * @param options options to process. This dictionary will be freed and replaced
 *                by a new one containing all options not found in obj.
 *                Of course this new dictionary needs to be freed by caller
 *                with av_dict_free().
 *
 * @return 0 on success, a negative AVERROR if some option was found in obj,
 *         but could not be set.
 *
 * @see av_dict_copy()
 */
int av_opt_set_dict(void *obj, struct AVDictionary **options);
#define AV_OPT_SEARCH_CHILDREN 0x0001 /**< Search in possible children of the
given object first. */
/**
* Look for an option in an object. Consider only options which
* have all the specified flags set.
*
* @param[in] obj A pointer to a struct whose first element is a
* pointer to an AVClass.
* @param[in] name The name of the option to look for.
* @param[in] unit When searching for named constants, name of the unit
* it belongs to.
* @param opt_flags Find only options with all the specified flags set (AV_OPT_FLAG).
* @param search_flags A combination of AV_OPT_SEARCH_*.
*
* @return A pointer to the option found, or NULL if no option
* was found.
*
* @note Options found with AV_OPT_SEARCH_CHILDREN flag may not be settable
* directly with av_set_string3(). Use special calls which take an options
* AVDictionary (e.g. avformat_open_input()) to set options found with this
* flag.
*/
const AVOption *av_opt_find(void *obj, const char *name, const char *unit,
int opt_flags, int search_flags);
#endif /* AVUTIL_OPT_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_PARSEUTILS_H
#define AVUTIL_PARSEUTILS_H
#include "rational.h"
/**
* @file
* misc parsing utilities
*/
/**
* Parse str and put in width_ptr and height_ptr the detected values.
*
* @param[in,out] width_ptr pointer to the variable which will contain the detected
* width value
* @param[in,out] height_ptr pointer to the variable which will contain the detected
* height value
* @param[in] str the string to parse: it has to be a string in the format
* width x height or a valid video size abbreviation.
* @return >= 0 on success, a negative error code otherwise
*/
int av_parse_video_size(int *width_ptr, int *height_ptr, const char *str);
/**
* Parse str and store the detected values in *rate.
*
* @param[in,out] rate pointer to the AVRational which will contain the detected
* frame rate
* @param[in] str the string to parse: it has to be a string in the format
* rate_num / rate_den, a float number or a valid video rate abbreviation
* @return >= 0 on success, a negative error code otherwise
*/
int av_parse_video_rate(AVRational *rate, const char *str);
/**
* Put the RGBA values that correspond to color_string in rgba_color.
*
* @param color_string a string specifying a color. It can be the name of
* a color (case insensitive match) or a [0x|#]RRGGBB[AA] sequence,
* possibly followed by "@" and a string representing the alpha
* component.
* The alpha component may be a string composed by "0x" followed by an
* hexadecimal number or a decimal number between 0.0 and 1.0, which
* represents the opacity value (0x00/0.0 means completely transparent,
* 0xff/1.0 completely opaque).
* If the alpha component is not specified then 0xff is assumed.
* The string "random" will result in a random color.
* @param slen length of the initial part of color_string containing the
* color. It can be set to -1 if color_string is a null terminated string
* containing nothing else than the color.
* @return >= 0 in case of success, a negative value in case of
* failure (for example if color_string cannot be parsed).
*/
int av_parse_color(uint8_t *rgba_color, const char *color_string, int slen,
void *log_ctx);
/**
 * Parses timestr and returns in *timeval a corresponding number of
 * microseconds.
 *
 * @param timeval puts here the number of microseconds corresponding
 * to the string in timestr. If the string represents a duration, it
 * is the number of microseconds contained in the time interval. If
 * the string is a date, is the number of microseconds since 1st of
 * January, 1970 up to the time of the parsed date. If timestr cannot
 * be successfully parsed, set *timeval to INT64_MIN.
 * @param timestr a string representing a date or a duration.
 * - If a date the syntax is:
 * @code
 * [{YYYY-MM-DD|YYYYMMDD}[T|t| ]]{{HH[:MM[:SS[.m...]]]}|{HH[MM[SS[.m...]]]}}[Z]
 * now
 * @endcode
 * If the value is "now" it takes the current time.
 * Time is local time unless Z is appended, in which case it is
 * interpreted as UTC.
 * If the year-month-day part is not specified it takes the current
 * year-month-day.
 * - If a duration the syntax is:
 * @code
 * [-]HH[:MM[:SS[.m...]]]
 * [-]S+[.m...]
 * @endcode
 * @param duration flag which tells how to interpret timestr, if not
 * zero timestr is interpreted as a duration, otherwise as a date
 * @return 0 in case of success, a negative value corresponding to an
 * AVERROR code otherwise
 */
int av_parse_time(int64_t *timeval, const char *timestr, int duration);
/**
* Attempt to find a specific tag in a URL.
*
* syntax: '?tag1=val1&tag2=val2...'. Little URL decoding is done.
* Return 1 if found.
*/
int av_find_info_tag(char *arg, int arg_size, const char *tag1, const char *info);
#endif /* AVUTIL_PARSEUTILS_H */
/*
* pixel format descriptor
* Copyright (c) 2009 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_PIXDESC_H
#define AVUTIL_PIXDESC_H
#include <inttypes.h>
#include "pixfmt.h"
/**
 * Describes how a single component (e.g. one color channel) of a pixel
 * format is laid out inside the image data planes.
 */
typedef struct AVComponentDescriptor{
    uint16_t plane :2; ///< which of the 4 planes contains the component
    /**
     * Number of elements between 2 horizontally consecutive pixels minus 1.
     * Elements are bits for bitstream formats, bytes otherwise.
     */
    uint16_t step_minus1 :3;
    /**
     * Number of elements before the component of the first pixel plus 1.
     * Elements are bits for bitstream formats, bytes otherwise.
     */
    uint16_t offset_plus1 :3;
    uint16_t shift :3; ///< number of least significant bits that must be shifted away to get the value
    uint16_t depth_minus1 :4; ///< number of bits in the component minus 1
}AVComponentDescriptor;
/**
* Descriptor that unambiguously describes how the bits of a pixel are
* stored in the up to 4 data planes of an image. It also stores the
* subsampling factors and number of components.
*
* @note This is separate of the colorspace (RGB, YCbCr, YPbPr, JPEG-style YUV
* and all the YUV variants) AVPixFmtDescriptor just stores how values
* are stored not what these values represent.
*/
/**
 * Descriptor that unambiguously describes how the bits of a pixel are
 * stored in the up to 4 data planes of an image (see the note above the
 * struct in the original header: this is separate from colorspace).
 */
typedef struct AVPixFmtDescriptor{
    const char *name; ///< name used for lookup, e.g. by av_get_pix_fmt()
    uint8_t nb_components; ///< The number of components each pixel has, (1-4)
    /**
     * Amount to shift the luma width right to find the chroma width.
     * For YV12 this is 1 for example.
     * chroma_width = -((-luma_width) >> log2_chroma_w)
     * The note above is needed to ensure rounding up.
     * This value only refers to the chroma components.
     */
    uint8_t log2_chroma_w; ///< chroma_width = -((-luma_width )>>log2_chroma_w)
    /**
     * Amount to shift the luma height right to find the chroma height.
     * For YV12 this is 1 for example.
     * chroma_height= -((-luma_height) >> log2_chroma_h)
     * The note above is needed to ensure rounding up.
     * This value only refers to the chroma components.
     */
    uint8_t log2_chroma_h;
    uint8_t flags; ///< combination of the PIX_FMT_* flags defined below
    /**
     * Parameters that describe how pixels are packed. If the format
     * has chroma components, they must be stored in comp[1] and
     * comp[2].
     */
    AVComponentDescriptor comp[4];
}AVPixFmtDescriptor;
#define PIX_FMT_BE 1 ///< Pixel format is big-endian.
#define PIX_FMT_PAL 2 ///< Pixel format has a palette in data[1], values are indexes in this palette.
#define PIX_FMT_BITSTREAM 4 ///< All values of a component are bit-wise packed end to end.
#define PIX_FMT_HWACCEL 8 ///< Pixel format is an HW accelerated format.
/**
* The array of all the pixel format descriptors.
*/
extern const AVPixFmtDescriptor av_pix_fmt_descriptors[];
/**
* Read a line from an image, and write the values of the
* pixel format component c to dst.
*
* @param data the array containing the pointers to the planes of the image
* @param linesize the array containing the linesizes of the image
* @param desc the pixel format descriptor for the image
* @param x the horizontal coordinate of the first pixel to read
* @param y the vertical coordinate of the first pixel to read
* @param w the width of the line to read, that is the number of
* values to write to dst
* @param read_pal_component if not zero and the format is a paletted
* format writes the values corresponding to the palette
* component c in data[1] to dst, rather than the palette indexes in
* data[0]. The behavior is undefined if the format is not paletted.
*/
void av_read_image_line(uint16_t *dst, const uint8_t *data[4], const int linesize[4],
const AVPixFmtDescriptor *desc, int x, int y, int c, int w, int read_pal_component);
/**
* Write the values from src to the pixel format component c of an
* image line.
*
* @param src array containing the values to write
* @param data the array containing the pointers to the planes of the
* image to write into. It is supposed to be zeroed.
* @param linesize the array containing the linesizes of the image
* @param desc the pixel format descriptor for the image
* @param x the horizontal coordinate of the first pixel to write
* @param y the vertical coordinate of the first pixel to write
* @param w the width of the line to write, that is the number of
* values to write to the image line
*/
void av_write_image_line(const uint16_t *src, uint8_t *data[4], const int linesize[4],
const AVPixFmtDescriptor *desc, int x, int y, int c, int w);
/**
* Return the pixel format corresponding to name.
*
* If there is no pixel format with name name, then looks for a
* pixel format with the name corresponding to the native endian
* format of name.
* For example in a little-endian system, first looks for "gray16",
* then for "gray16le".
*
* Finally if no pixel format has been found, returns PIX_FMT_NONE.
*/
enum PixelFormat av_get_pix_fmt(const char *name);
/**
* Return the short name for a pixel format, NULL in case pix_fmt is
* unknown.
*
* @see av_get_pix_fmt(), av_get_pix_fmt_string()
*/
const char *av_get_pix_fmt_name(enum PixelFormat pix_fmt);
/**
* Print in buf the string corresponding to the pixel format with
* number pix_fmt, or an header if pix_fmt is negative.
*
* @param buf the buffer where to write the string
* @param buf_size the size of buf
* @param pix_fmt the number of the pixel format to print the
* corresponding info string, or a negative value to print the
* corresponding header.
*/
char *av_get_pix_fmt_string (char *buf, int buf_size, enum PixelFormat pix_fmt);
/**
* Return the number of bits per pixel used by the pixel format
* described by pixdesc.
*
* The returned number of bits refers to the number of bits actually
* used for storing the pixel information, that is padding bits are
* not counted.
*/
int av_get_bits_per_pixel(const AVPixFmtDescriptor *pixdesc);
#endif /* AVUTIL_PIXDESC_H */
/*
* copyright (c) 2006 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_PIXFMT_H
#define AVUTIL_PIXFMT_H
/**
* @file
* pixel format definitions
*
* @warning This file has to be considered an internal but installed
* header, so it should not be directly included in your projects.
*/
#include "libavutil/avconfig.h"
/**
* Pixel format. Notes:
*
* PIX_FMT_RGB32 is handled in an endian-specific manner. An RGBA
* color is put together as:
* (A << 24) | (R << 16) | (G << 8) | B
* This is stored as BGRA on little-endian CPU architectures and ARGB on
* big-endian CPUs.
*
* When the pixel format is palettized RGB (PIX_FMT_PAL8), the palettized
* image data is stored in AVFrame.data[0]. The palette is transported in
* AVFrame.data[1], is 1024 bytes long (256 4-byte entries) and is
* formatted the same as in PIX_FMT_RGB32 described above (i.e., it is
* also endian-specific). Note also that the individual RGB palette
* components stored in AVFrame.data[1] should be in the range 0..255.
* This is important as many custom PAL8 video codecs that were designed
* to run on the IBM VGA graphics adapter use 6-bit palette components.
*
* For all the 8bit per pixel formats, an RGB32 palette is in data[1] like
* for pal8. This palette is filled in automatically by the function
* allocating the picture.
*
* Note, make sure that all newly added big endian formats have pix_fmt&1==1
* and that all newly added little endian formats have pix_fmt&1==0
* this allows simpler detection of big vs little endian.
*/
enum PixelFormat {
PIX_FMT_NONE= -1,
PIX_FMT_YUV420P, ///< planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
PIX_FMT_YUYV422, ///< packed YUV 4:2:2, 16bpp, Y0 Cb Y1 Cr
PIX_FMT_RGB24, ///< packed RGB 8:8:8, 24bpp, RGBRGB...
PIX_FMT_BGR24, ///< packed RGB 8:8:8, 24bpp, BGRBGR...
PIX_FMT_YUV422P, ///< planar YUV 4:2:2, 16bpp, (1 Cr & Cb sample per 2x1 Y samples)
PIX_FMT_YUV444P, ///< planar YUV 4:4:4, 24bpp, (1 Cr & Cb sample per 1x1 Y samples)
PIX_FMT_YUV410P, ///< planar YUV 4:1:0, 9bpp, (1 Cr & Cb sample per 4x4 Y samples)
PIX_FMT_YUV411P, ///< planar YUV 4:1:1, 12bpp, (1 Cr & Cb sample per 4x1 Y samples)
PIX_FMT_GRAY8, ///< Y , 8bpp
PIX_FMT_MONOWHITE, ///< Y , 1bpp, 0 is white, 1 is black, in each byte pixels are ordered from the msb to the lsb
PIX_FMT_MONOBLACK, ///< Y , 1bpp, 0 is black, 1 is white, in each byte pixels are ordered from the msb to the lsb
PIX_FMT_PAL8, ///< 8 bit with PIX_FMT_RGB32 palette
PIX_FMT_YUVJ420P, ///< planar YUV 4:2:0, 12bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV420P and setting color_range
PIX_FMT_YUVJ422P, ///< planar YUV 4:2:2, 16bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV422P and setting color_range
PIX_FMT_YUVJ444P, ///< planar YUV 4:4:4, 24bpp, full scale (JPEG), deprecated in favor of PIX_FMT_YUV444P and setting color_range
PIX_FMT_XVMC_MPEG2_MC,///< XVideo Motion Acceleration via common packet passing
PIX_FMT_XVMC_MPEG2_IDCT,
PIX_FMT_UYVY422, ///< packed YUV 4:2:2, 16bpp, Cb Y0 Cr Y1
PIX_FMT_UYYVYY411, ///< packed YUV 4:1:1, 12bpp, Cb Y0 Y1 Cr Y2 Y3
PIX_FMT_BGR8, ///< packed RGB 3:3:2, 8bpp, (msb)2B 3G 3R(lsb)
PIX_FMT_BGR4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1B 2G 1R(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
PIX_FMT_BGR4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1B 2G 1R(lsb)
PIX_FMT_RGB8, ///< packed RGB 3:3:2, 8bpp, (msb)2R 3G 3B(lsb)
PIX_FMT_RGB4, ///< packed RGB 1:2:1 bitstream, 4bpp, (msb)1R 2G 1B(lsb), a byte contains two pixels, the first pixel in the byte is the one composed by the 4 msb bits
PIX_FMT_RGB4_BYTE, ///< packed RGB 1:2:1, 8bpp, (msb)1R 2G 1B(lsb)
PIX_FMT_NV12, ///< planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (first byte U and the following byte V)
PIX_FMT_NV21, ///< as above, but U and V bytes are swapped
PIX_FMT_ARGB, ///< packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
PIX_FMT_RGBA, ///< packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
PIX_FMT_ABGR, ///< packed ABGR 8:8:8:8, 32bpp, ABGRABGR...
PIX_FMT_BGRA, ///< packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
PIX_FMT_GRAY16BE, ///< Y , 16bpp, big-endian
PIX_FMT_GRAY16LE, ///< Y , 16bpp, little-endian
PIX_FMT_YUV440P, ///< planar YUV 4:4:0 (1 Cr & Cb sample per 1x2 Y samples)
PIX_FMT_YUVJ440P, ///< planar YUV 4:4:0 full scale (JPEG), deprecated in favor of PIX_FMT_YUV440P and setting color_range
PIX_FMT_YUVA420P, ///< planar YUV 4:2:0, 20bpp, (1 Cr & Cb sample per 2x2 Y & A samples)
PIX_FMT_VDPAU_H264,///< H.264 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_MPEG1,///< MPEG-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_MPEG2,///< MPEG-2 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_WMV3,///< WMV3 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_VDPAU_VC1, ///< VC-1 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_RGB48BE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as big-endian
PIX_FMT_RGB48LE, ///< packed RGB 16:16:16, 48bpp, 16R, 16G, 16B, the 2-byte value for each R/G/B component is stored as little-endian
PIX_FMT_RGB565BE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian
PIX_FMT_RGB565LE, ///< packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian
PIX_FMT_RGB555BE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), big-endian, most significant bit to 0
PIX_FMT_RGB555LE, ///< packed RGB 5:5:5, 16bpp, (msb)1A 5R 5G 5B(lsb), little-endian, most significant bit to 0
PIX_FMT_BGR565BE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), big-endian
PIX_FMT_BGR565LE, ///< packed BGR 5:6:5, 16bpp, (msb) 5B 6G 5R(lsb), little-endian
PIX_FMT_BGR555BE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), big-endian, most significant bit to 1
PIX_FMT_BGR555LE, ///< packed BGR 5:5:5, 16bpp, (msb)1A 5B 5G 5R(lsb), little-endian, most significant bit to 1
PIX_FMT_VAAPI_MOCO, ///< HW acceleration through VA API at motion compensation entry-point, Picture.data[3] contains a vaapi_render_state struct which contains macroblocks as well as various fields extracted from headers
PIX_FMT_VAAPI_IDCT, ///< HW acceleration through VA API at IDCT entry-point, Picture.data[3] contains a vaapi_render_state struct which contains fields extracted from headers
PIX_FMT_VAAPI_VLD, ///< HW decoding through VA API, Picture.data[3] contains a vaapi_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_YUV420P16LE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV420P16BE, ///< planar YUV 4:2:0, 24bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV422P16LE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_YUV422P16BE, ///< planar YUV 4:2:2, 32bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV444P16LE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_YUV444P16BE, ///< planar YUV 4:4:4, 48bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_VDPAU_MPEG4, ///< MPEG4 HW decoding with VDPAU, data[0] contains a vdpau_render_state struct which contains the bitstream of the slices as well as various fields extracted from headers
PIX_FMT_DXVA2_VLD, ///< HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer
PIX_FMT_RGB444LE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), little-endian, most significant bits to 0
PIX_FMT_RGB444BE, ///< packed RGB 4:4:4, 16bpp, (msb)4A 4R 4G 4B(lsb), big-endian, most significant bits to 0
PIX_FMT_BGR444LE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), little-endian, most significant bits to 1
PIX_FMT_BGR444BE, ///< packed BGR 4:4:4, 16bpp, (msb)4A 4B 4G 4R(lsb), big-endian, most significant bits to 1
PIX_FMT_GRAY8A, ///< 8bit gray, 8bit alpha
PIX_FMT_BGR48BE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as big-endian
PIX_FMT_BGR48LE, ///< packed RGB 16:16:16, 48bpp, 16B, 16G, 16R, the 2-byte value for each R/G/B component is stored as little-endian
//the following 10 formats have the disadvantage of needing 1 format for each bit depth, thus
//If you want to support multiple bit depths, then using PIX_FMT_YUV420P16* with the bpp stored separately
//is better
PIX_FMT_YUV420P9BE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV420P9LE, ///< planar YUV 4:2:0, 13.5bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV420P10BE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), big-endian
PIX_FMT_YUV420P10LE,///< planar YUV 4:2:0, 15bpp, (1 Cr & Cb sample per 2x2 Y samples), little-endian
PIX_FMT_YUV422P10BE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), big-endian
PIX_FMT_YUV422P10LE,///< planar YUV 4:2:2, 20bpp, (1 Cr & Cb sample per 2x1 Y samples), little-endian
PIX_FMT_YUV444P9BE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_YUV444P9LE, ///< planar YUV 4:4:4, 27bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_YUV444P10BE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), big-endian
PIX_FMT_YUV444P10LE,///< planar YUV 4:4:4, 30bpp, (1 Cr & Cb sample per 1x1 Y samples), little-endian
PIX_FMT_NB, ///< number of pixel formats, DO NOT USE THIS if you want to link with shared libav* because the number of formats might differ between versions
};
#define PIX_FMT_Y400A PIX_FMT_GRAY8A
#if AV_HAVE_BIGENDIAN
# define PIX_FMT_NE(be, le) PIX_FMT_##be
#else
# define PIX_FMT_NE(be, le) PIX_FMT_##le
#endif
#define PIX_FMT_RGB32 PIX_FMT_NE(ARGB, BGRA)
#define PIX_FMT_RGB32_1 PIX_FMT_NE(RGBA, ABGR)
#define PIX_FMT_BGR32 PIX_FMT_NE(ABGR, RGBA)
#define PIX_FMT_BGR32_1 PIX_FMT_NE(BGRA, ARGB)
#define PIX_FMT_GRAY16 PIX_FMT_NE(GRAY16BE, GRAY16LE)
#define PIX_FMT_RGB48 PIX_FMT_NE(RGB48BE, RGB48LE)
#define PIX_FMT_RGB565 PIX_FMT_NE(RGB565BE, RGB565LE)
#define PIX_FMT_RGB555 PIX_FMT_NE(RGB555BE, RGB555LE)
#define PIX_FMT_RGB444 PIX_FMT_NE(RGB444BE, RGB444LE)
#define PIX_FMT_BGR48 PIX_FMT_NE(BGR48BE, BGR48LE)
#define PIX_FMT_BGR565 PIX_FMT_NE(BGR565BE, BGR565LE)
#define PIX_FMT_BGR555 PIX_FMT_NE(BGR555BE, BGR555LE)
#define PIX_FMT_BGR444 PIX_FMT_NE(BGR444BE, BGR444LE)
#define PIX_FMT_YUV420P9 PIX_FMT_NE(YUV420P9BE , YUV420P9LE)
#define PIX_FMT_YUV444P9 PIX_FMT_NE(YUV444P9BE , YUV444P9LE)
#define PIX_FMT_YUV420P10 PIX_FMT_NE(YUV420P10BE, YUV420P10LE)
#define PIX_FMT_YUV422P10 PIX_FMT_NE(YUV422P10BE, YUV422P10LE)
#define PIX_FMT_YUV444P10 PIX_FMT_NE(YUV444P10BE, YUV444P10LE)
#define PIX_FMT_YUV420P16 PIX_FMT_NE(YUV420P16BE, YUV420P16LE)
#define PIX_FMT_YUV422P16 PIX_FMT_NE(YUV422P16BE, YUV422P16LE)
#define PIX_FMT_YUV444P16 PIX_FMT_NE(YUV444P16BE, YUV444P16LE)
#endif /* AVUTIL_PIXFMT_H */
/*
* Copyright (c) 2009 Baptiste Coudurier <baptiste.coudurier@gmail.com>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_RANDOM_SEED_H
#define AVUTIL_RANDOM_SEED_H
#include <stdint.h>
/**
* Get a seed to use in conjunction with random functions.
*/
uint32_t av_get_random_seed(void);
#endif /* AVUTIL_RANDOM_SEED_H */
/*
* rational numbers
* Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/**
* @file
* rational numbers
* @author Michael Niedermayer <michaelni@gmx.at>
*/
#ifndef AVUTIL_RATIONAL_H
#define AVUTIL_RATIONAL_H
#include <stdint.h>
#include <limits.h>
#include "attributes.h"
/**
* rational number numerator/denominator
*/
typedef struct AVRational{
int num; ///< numerator
int den; ///< denominator; a zero den is used elsewhere in this API to encode infinities (see av_d2q: inf is {1,0}) and 0/0 for "undefined" (see av_cmp_q)
} AVRational;
/**
* Compare two rationals.
* @param a first rational
* @param b second rational
* @return 0 if a==b, 1 if a>b, -1 if a<b, and INT_MIN if one of the
* values is of the form 0/0
*/
static inline int av_cmp_q(AVRational a, AVRational b){
/* Cross-multiplied difference in 64 bits: for positive denominators,
 * sign(tmp) == sign(a - b); 64-bit math cannot overflow for int operands. */
const int64_t tmp= a.num * (int64_t)b.den - b.num * (int64_t)a.den;
/* XORing in the (sign-extended) denominators flips the sign bit of tmp once
 * per negative denominator; >>63 then yields 0 or -1, and |1 maps that to
 * the final result +1 or -1. */
if(tmp) return ((tmp ^ a.den ^ b.den)>>63)|1;
/* tmp == 0 with both denominators nonzero means the fractions are equal. */
else if(b.den && a.den) return 0;
/* At least one denominator is 0 (an infinity): compare the signs of the
 * numerators; x>>31 is 0 for non-negative, -1 for negative int x. */
else if(a.num && b.num) return (a.num>>31) - (b.num>>31);
/* Remaining case: at least one value is of the form 0/0 (undefined). */
else return INT_MIN;
}
/**
* Convert rational to double.
* @param a rational to convert
* @return (double) a
*/
static inline double av_q2d(AVRational a){
/* Promote the numerator explicitly; the division then happens in double,
 * so e.g. {1,2} yields 0.5 and {1,0} yields +inf. */
return (double)a.num / a.den;
}
/**
* Reduce a fraction.
* This is useful for framerate calculations.
* @param dst_num destination numerator
* @param dst_den destination denominator
* @param num source numerator
* @param den source denominator
* @param max the maximum allowed for dst_num & dst_den
* @return 1 if exact, 0 otherwise
*/
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max);
/**
* Multiply two rationals.
* @param b first rational
* @param c second rational
* @return b*c
*/
AVRational av_mul_q(AVRational b, AVRational c) av_const;
/**
* Divide one rational by another.
* @param b first rational
* @param c second rational
* @return b/c
*/
AVRational av_div_q(AVRational b, AVRational c) av_const;
/**
* Add two rationals.
* @param b first rational
* @param c second rational
* @return b+c
*/
AVRational av_add_q(AVRational b, AVRational c) av_const;
/**
* Subtract one rational from another.
* @param b first rational
* @param c second rational
* @return b-c
*/
AVRational av_sub_q(AVRational b, AVRational c) av_const;
/**
* Convert a double precision floating point number to a rational.
* inf is expressed as {1,0} or {-1,0} depending on the sign.
*
* @param d double to convert
* @param max the maximum allowed numerator and denominator
* @return (AVRational) d
*/
AVRational av_d2q(double d, int max) av_const;
/**
* @return 1 if q1 is nearer to q than q2, -1 if q2 is nearer
* than q1, 0 if they have the same distance.
*/
int av_nearer_q(AVRational q, AVRational q1, AVRational q2);
/**
* Find the nearest value in q_list to q.
* @param q_list an array of rationals terminated by {0, 0}
* @return the index of the nearest value found in the array
*/
int av_find_nearest_q_idx(AVRational q, const AVRational* q_list);
#endif /* AVUTIL_RATIONAL_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_SAMPLEFMT_H
#define AVUTIL_SAMPLEFMT_H
#include "avutil.h"
/**
* all in native-endian format
*/
enum AVSampleFormat {
AV_SAMPLE_FMT_NONE = -1,
/* Concrete formats are sequential starting at 0; AV_SAMPLE_FMT_NB tracks the count. */
AV_SAMPLE_FMT_U8, ///< unsigned 8 bits
AV_SAMPLE_FMT_S16, ///< signed 16 bits
AV_SAMPLE_FMT_S32, ///< signed 32 bits
AV_SAMPLE_FMT_FLT, ///< float
AV_SAMPLE_FMT_DBL, ///< double
AV_SAMPLE_FMT_NB ///< Number of sample formats. DO NOT USE if linking dynamically
};
/**
* Return the name of sample_fmt, or NULL if sample_fmt is not
* recognized.
*/
const char *av_get_sample_fmt_name(enum AVSampleFormat sample_fmt);
/**
* Return a sample format corresponding to name, or AV_SAMPLE_FMT_NONE
* on error.
*/
enum AVSampleFormat av_get_sample_fmt(const char *name);
/**
* Generate a string corresponding to the sample format with
* sample_fmt, or a header if sample_fmt is negative.
*
* @param buf the buffer where to write the string
* @param buf_size the size of buf
* @param sample_fmt the number of the sample format to print the
* corresponding info string, or a negative value to print the
* corresponding header.
* @return the pointer to the filled buffer or NULL if sample_fmt is
* unknown or in case of other errors
*/
char *av_get_sample_fmt_string(char *buf, int buf_size, enum AVSampleFormat sample_fmt);
#if FF_API_GET_BITS_PER_SAMPLE_FMT
/**
* @deprecated Use av_get_bytes_per_sample() instead.
*/
attribute_deprecated
int av_get_bits_per_sample_fmt(enum AVSampleFormat sample_fmt);
#endif
/**
* Return number of bytes per sample.
*
* @param sample_fmt the sample format
* @return number of bytes per sample or zero if unknown for the given
* sample format
*/
int av_get_bytes_per_sample(enum AVSampleFormat sample_fmt);
/**
* Fill channel data pointers and linesizes for samples with sample
* format sample_fmt.
*
* The pointers array is filled with the pointers to the samples data:
* for planar, set the start point of each plane's data within the buffer,
* for packed, set the start point of the entire buffer only.
*
* The linesize array is filled with the aligned size of each samples
* plane, that is linesize[i] will contain the linesize of the plane i,
* and will be zero for all the unused planes. All linesize values are
* equal.
*
* @param pointers array to be filled with the pointer for each plane, may be NULL
* @param linesizes array to be filled with the linesize, may be NULL
* @param buf the pointer to a buffer containing the samples
* @param nb_samples the number of samples in a single channel
* @param planar 1 if the samples layout is planar, 0 if it is packed
* @param nb_channels the number of channels
* @return the total size of the buffer, a negative
* error code in case of failure
*/
int av_samples_fill_arrays(uint8_t *pointers[8], int linesizes[8],
uint8_t *buf, int nb_channels, int nb_samples,
enum AVSampleFormat sample_fmt, int planar, int align);
/**
* Allocate a samples buffer for nb_samples samples, and
* fill pointers and linesizes accordingly.
* The allocated samples buffer has to be freed by using
* av_freep(&pointers[0]).
*
* @param nb_channels number of audio channels
* @param nb_samples number of samples per channel
* @param planar 1 if the samples layout is planar, 0 if packed,
* @param align the value to use for buffer size alignment
* @return the size in bytes required for the samples buffer, a negative
* error code in case of failure
* @see av_samples_fill_arrays()
*/
int av_samples_alloc(uint8_t *pointers[8], int linesizes[8],
int nb_channels, int nb_samples,
enum AVSampleFormat sample_fmt, int planar,
int align);
#endif /* AVUTIL_SAMPLEFMT_H */
/*
* Copyright (C) 2007 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVUTIL_SHA_H
#define AVUTIL_SHA_H
#include <stdint.h>
extern const int av_sha_size;
struct AVSHA;
/**
* Initialize SHA-1 or SHA-2 hashing.
*
* @param context pointer to the function context (of size av_sha_size)
* @param bits number of bits in digest (SHA-1 - 160 bits, SHA-2 224 or 256 bits)
* @return zero if initialization succeeded, -1 otherwise
*/
int av_sha_init(struct AVSHA* context, int bits);
/**
* Update hash value.
*
* @param context hash function context
* @param data input data to update hash with
* @param len input data length
*/
void av_sha_update(struct AVSHA* context, const uint8_t* data, unsigned int len);
/**
* Finish hashing and output digest value.
*
* @param context hash function context
* @param digest buffer where output digest value is stored
*/
void av_sha_final(struct AVSHA* context, uint8_t *digest);
#endif /* AVUTIL_SHA_H */
/*
* Copyright (C) 2001-2003 Michael Niedermayer <michaelni@gmx.at>
*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef SWSCALE_SWSCALE_H
#define SWSCALE_SWSCALE_H
/**
* @file
* @brief
* external api for the swscale stuff
*/
#include "libavutil/avutil.h"
#define LIBSWSCALE_VERSION_MAJOR 2
#define LIBSWSCALE_VERSION_MINOR 0
#define LIBSWSCALE_VERSION_MICRO 0
#define LIBSWSCALE_VERSION_INT AV_VERSION_INT(LIBSWSCALE_VERSION_MAJOR, \
LIBSWSCALE_VERSION_MINOR, \
LIBSWSCALE_VERSION_MICRO)
#define LIBSWSCALE_VERSION AV_VERSION(LIBSWSCALE_VERSION_MAJOR, \
LIBSWSCALE_VERSION_MINOR, \
LIBSWSCALE_VERSION_MICRO)
#define LIBSWSCALE_BUILD LIBSWSCALE_VERSION_INT
#define LIBSWSCALE_IDENT "SwS" AV_STRINGIFY(LIBSWSCALE_VERSION)
/**
* Those FF_API_* defines are not part of public API.
* They may change, break or disappear at any time.
*/
#ifndef FF_API_SWS_GETCONTEXT
#define FF_API_SWS_GETCONTEXT (LIBSWSCALE_VERSION_MAJOR < 3)
#endif
#ifndef FF_API_SWS_CPU_CAPS
#define FF_API_SWS_CPU_CAPS (LIBSWSCALE_VERSION_MAJOR < 3)
#endif
#ifndef FF_API_SWS_FORMAT_NAME
#define FF_API_SWS_FORMAT_NAME (LIBSWSCALE_VERSION_MAJOR < 3)
#endif
/**
* Returns the LIBSWSCALE_VERSION_INT constant.
*/
unsigned swscale_version(void);
/**
* Returns the libswscale build-time configuration.
*/
const char *swscale_configuration(void);
/**
* Returns the libswscale license.
*/
const char *swscale_license(void);
/* values for the flags, the stuff on the command line is different */
#define SWS_FAST_BILINEAR 1
#define SWS_BILINEAR 2
#define SWS_BICUBIC 4
#define SWS_X 8
#define SWS_POINT 0x10
#define SWS_AREA 0x20
#define SWS_BICUBLIN 0x40
#define SWS_GAUSS 0x80
#define SWS_SINC 0x100
#define SWS_LANCZOS 0x200
#define SWS_SPLINE 0x400
#define SWS_SRC_V_CHR_DROP_MASK 0x30000
#define SWS_SRC_V_CHR_DROP_SHIFT 16
#define SWS_PARAM_DEFAULT 123456
#define SWS_PRINT_INFO 0x1000
//the following 3 flags are not completely implemented
//internal chrominance subsampling info
#define SWS_FULL_CHR_H_INT 0x2000
//input subsampling info
#define SWS_FULL_CHR_H_INP 0x4000
#define SWS_DIRECT_BGR 0x8000
#define SWS_ACCURATE_RND 0x40000
#define SWS_BITEXACT 0x80000
#if FF_API_SWS_CPU_CAPS
/**
* CPU caps are autodetected now, those flags
* are only provided for API compatibility.
*/
#define SWS_CPU_CAPS_MMX 0x80000000
#define SWS_CPU_CAPS_MMX2 0x20000000
#define SWS_CPU_CAPS_3DNOW 0x40000000
#define SWS_CPU_CAPS_ALTIVEC 0x10000000
#define SWS_CPU_CAPS_BFIN 0x01000000
#define SWS_CPU_CAPS_SSE2 0x02000000
#endif
#define SWS_MAX_REDUCE_CUTOFF 0.002
#define SWS_CS_ITU709 1
#define SWS_CS_FCC 4
#define SWS_CS_ITU601 5
#define SWS_CS_ITU624 5
#define SWS_CS_SMPTE170M 5
#define SWS_CS_SMPTE240M 7
#define SWS_CS_DEFAULT 5
/**
* Returns a pointer to yuv<->rgb coefficients for the given colorspace
* suitable for sws_setColorspaceDetails().
*
* @param colorspace One of the SWS_CS_* macros. If invalid,
* SWS_CS_DEFAULT is used.
*/
const int *sws_getCoefficients(int colorspace);
// when used for filters they must have an odd number of elements
// coeffs cannot be shared between vectors
/**
 * A 1-D filter coefficient vector. When used as a filter it must have an
 * odd number of elements, and the coeff array must not be shared between
 * vectors.
 */
typedef struct {
double *coeff; ///< pointer to the list of coefficients
int length; ///< number of coefficients in the vector
} SwsVector;
// vectors can be shared
/**
 * A set of per-plane filters: horizontal/vertical for luma and chroma.
 * Unlike coeff arrays, the SwsVector pointers may be shared between filters.
 */
typedef struct {
SwsVector *lumH; ///< horizontal luma filter
SwsVector *lumV; ///< vertical luma filter
SwsVector *chrH; ///< horizontal chroma filter
SwsVector *chrV; ///< vertical chroma filter
} SwsFilter;
struct SwsContext;
/**
* Returns a positive value if pix_fmt is a supported input format, 0
* otherwise.
*/
int sws_isSupportedInput(enum PixelFormat pix_fmt);
/**
* Returns a positive value if pix_fmt is a supported output format, 0
* otherwise.
*/
int sws_isSupportedOutput(enum PixelFormat pix_fmt);
/**
* Allocates an empty SwsContext. This must be filled and passed to
* sws_init_context(). For filling see AVOptions, options.c and
* sws_setColorspaceDetails().
*/
struct SwsContext *sws_alloc_context(void);
/**
* Initializes the swscaler context sws_context.
*
* @return zero or positive value on success, a negative value on
* error
*/
int sws_init_context(struct SwsContext *sws_context, SwsFilter *srcFilter, SwsFilter *dstFilter);
/**
* Frees the swscaler context swsContext.
* If swsContext is NULL, then does nothing.
*/
void sws_freeContext(struct SwsContext *swsContext);
#if FF_API_SWS_GETCONTEXT
/**
* Allocates and returns a SwsContext. You need it to perform
* scaling/conversion operations using sws_scale().
*
* @param srcW the width of the source image
* @param srcH the height of the source image
* @param srcFormat the source image format
* @param dstW the width of the destination image
* @param dstH the height of the destination image
* @param dstFormat the destination image format
* @param flags specify which algorithm and options to use for rescaling
* @return a pointer to an allocated context, or NULL in case of error
* @note this function is to be removed after a saner alternative is
* written
* @deprecated Use sws_getCachedContext() instead.
*/
struct SwsContext *sws_getContext(int srcW, int srcH, enum PixelFormat srcFormat,
int dstW, int dstH, enum PixelFormat dstFormat,
int flags, SwsFilter *srcFilter,
SwsFilter *dstFilter, const double *param);
#endif
/**
* Scales the image slice in srcSlice and puts the resulting scaled
* slice in the image in dst. A slice is a sequence of consecutive
* rows in an image.
*
* Slices have to be provided in sequential order, either in
* top-bottom or bottom-top order. If slices are provided in
* non-sequential order the behavior of the function is undefined.
*
* @param context the scaling context previously created with
* sws_getContext()
* @param srcSlice the array containing the pointers to the planes of
* the source slice
* @param srcStride the array containing the strides for each plane of
* the source image
* @param srcSliceY the position in the source image of the slice to
* process, that is the number (counted starting from
* zero) in the image of the first row of the slice
* @param srcSliceH the height of the source slice, that is the number
* of rows in the slice
* @param dst the array containing the pointers to the planes of
* the destination image
* @param dstStride the array containing the strides for each plane of
* the destination image
* @return the height of the output slice
*/
int sws_scale(struct SwsContext *context, const uint8_t* const srcSlice[], const int srcStride[],
int srcSliceY, int srcSliceH, uint8_t* const dst[], const int dstStride[]);
#if LIBSWSCALE_VERSION_MAJOR < 1
/**
* @deprecated Use sws_scale() instead.
*/
int sws_scale_ordered(struct SwsContext *context, const uint8_t* const src[],
int srcStride[], int srcSliceY, int srcSliceH,
uint8_t* dst[], int dstStride[]) attribute_deprecated;
#endif
/**
* @param inv_table the yuv2rgb coefficients, normally ff_yuv2rgb_coeffs[x]
* @param fullRange if 1 then the luma range is 0..255 if 0 it is 16..235
* @return -1 if not supported
*/
int sws_setColorspaceDetails(struct SwsContext *c, const int inv_table[4],
int srcRange, const int table[4], int dstRange,
int brightness, int contrast, int saturation);
/**
* @return -1 if not supported
*/
int sws_getColorspaceDetails(struct SwsContext *c, int **inv_table,
int *srcRange, int **table, int *dstRange,
int *brightness, int *contrast, int *saturation);
/**
* Allocates and returns an uninitialized vector with length coefficients.
*/
SwsVector *sws_allocVec(int length);
/**
* Returns a normalized Gaussian curve used to filter stuff
* quality=3 is high quality, lower is lower quality.
*/
SwsVector *sws_getGaussianVec(double variance, double quality);
/**
* Allocates and returns a vector with length coefficients, all
* with the same value c.
*/
SwsVector *sws_getConstVec(double c, int length);
/**
* Allocates and returns a vector with just one coefficient, with
* value 1.0.
*/
SwsVector *sws_getIdentityVec(void);
/**
* Scales all the coefficients of a by the scalar value.
*/
void sws_scaleVec(SwsVector *a, double scalar);
/**
* Scales all the coefficients of a so that their sum equals height.
*/
void sws_normalizeVec(SwsVector *a, double height);
void sws_convVec(SwsVector *a, SwsVector *b);
void sws_addVec(SwsVector *a, SwsVector *b);
void sws_subVec(SwsVector *a, SwsVector *b);
void sws_shiftVec(SwsVector *a, int shift);
/**
* Allocates and returns a clone of the vector a, that is a vector
* with the same coefficients as a.
*/
SwsVector *sws_cloneVec(SwsVector *a);
#if LIBSWSCALE_VERSION_MAJOR < 1
/**
* @deprecated Use sws_printVec2() instead.
*/
attribute_deprecated void sws_printVec(SwsVector *a);
#endif
/**
* Prints with av_log() a textual representation of the vector a
* if log_level <= av_log_level.
*/
void sws_printVec2(SwsVector *a, AVClass *log_ctx, int log_level);
/* Releases the memory held by vector a (NOTE(review): assumed to be the
 * counterpart of the sws_*Vec allocators declared earlier in this header —
 * confirm against the libswscale documentation). */
void sws_freeVec(SwsVector *a);
/* Builds a filter from the given luma/chroma blur, sharpen and shift
 * parameters; verbose presumably enables diagnostic output while the
 * filter is built — confirm with the libswscale docs. */
SwsFilter *sws_getDefaultFilter(float lumaGBlur, float chromaGBlur,
                                float lumaSharpen, float chromaSharpen,
                                float chromaHShift, float chromaVShift,
                                int verbose);
/* Frees filter (and, presumably, the vectors it owns — confirm). */
void sws_freeFilter(SwsFilter *filter);
/**
 * Checks if context can be reused, otherwise reallocates a new
 * one.
 *
 * If context is NULL, just calls sws_getContext() to get a new
 * context. Otherwise, checks if the parameters are the ones already
 * saved in context. If that is the case, returns the current
 * context. Otherwise, frees context and gets a new context with
 * the new parameters.
 *
 * Be warned that srcFilter and dstFilter are not checked, they
 * are assumed to remain the same.
 */
struct SwsContext *sws_getCachedContext(struct SwsContext *context,
                                        int srcW, int srcH, enum PixelFormat srcFormat,
                                        int dstW, int dstH, enum PixelFormat dstFormat,
                                        int flags, SwsFilter *srcFilter,
                                        SwsFilter *dstFilter, const double *param);
/**
 * Converts an 8bit paletted frame into a frame with a color depth of 32-bits.
 *
 * The output frame will have the same packed format as the palette.
 *
 * @param src        source frame buffer
 * @param dst        destination frame buffer
 * @param num_pixels number of pixels to convert
 * @param palette    array with [256] entries, which must match color arrangement (RGB or BGR) of src
 */
void sws_convertPalette8ToPacked32(const uint8_t *src, uint8_t *dst, int num_pixels, const uint8_t *palette);
/**
 * Converts an 8bit paletted frame into a frame with a color depth of 24 bits.
 *
 * With the palette format "ABCD", the destination frame ends up with the format "ABC".
 *
 * @param src        source frame buffer
 * @param dst        destination frame buffer
 * @param num_pixels number of pixels to convert
 * @param palette    array with [256] entries, which must match color arrangement (RGB or BGR) of src
 */
void sws_convertPalette8ToPacked24(const uint8_t *src, uint8_t *dst, int num_pixels, const uint8_t *palette);
#endif /* SWSCALE_SWSCALE_H */
#!/bin/bash
# As android can't use the .so.xx version of libs, we must change the SONAME
# of our libs to use the .so file
#
# Usage: $0 <directory containing the built .so files>
#
# Each replacement string is padded with \0 so the patched SONAME keeps the
# exact byte length of the original (a binary in-place edit must not change
# the file size or offsets).
if ! command -v rpl >/dev/null 2>&1; then
    echo "You must install rpl (sudo apt-get install rpl) before running this script"
    exit 1
fi
# Previously a missing/space-containing argument silently patched the wrong
# directory: an unquoted, absent $1 makes 'cd' go to $HOME.
if [ -z "$1" ]; then
    echo "Usage: $0 <lib directory>"
    exit 1
fi
cd "$1" || exit 1
rpl -x.so -e libavcodec.so.53 "libavcodec.so\0\0\0" *
rpl -x.so -e libavdevice.so.53 "libavdevice.so\0\0\0" *
rpl -x.so -e libavfilter.so.2 "libavfilter.so\0\0" *
rpl -x.so -e libavformat.so.53 "libavformat.so\0\0\0" *
rpl -x.so -e libavutil.so.51 "libavutil.so\0\0\0" *
rpl -x.so -e libswscale.so.2 "libswscale.so\0\0" *
# NDK build file for glfix: a tiny JNI shim (fix-GLES20.c) exposing GLES2
# entry points that take integer buffer offsets.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Module name: produces libglfix.so
LOCAL_MODULE := glfix
# Link against the system OpenGL ES 2.0 library.
LOCAL_LDLIBS := -lGLESv2
LOCAL_SRC_FILES := fix-GLES20.c
LOCAL_CFLAGS += -DANDROID_NDK
include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
/*
* fix-GLES20.c
*
* Created on: Mar 18, 2012
* Author: "Dmytro Baryskyy"
*/
#include <jni.h>
#include <stdint.h>

#include <GLES2/gl2.h>
/*
 * JNI binding for glVertexAttribPointer that takes the "pointer" argument as
 * a jint byte offset (used with a bound vertex buffer object, where the GL
 * spec treats the pointer parameter as an offset into the buffer).
 *
 * The offset is widened through intptr_t before the pointer cast so the
 * integer-to-pointer conversion is well-defined on both 32- and 64-bit ABIs
 * (a direct (void*)jint cast truncates/warns on LP64).
 */
void Java_fix_android_opengl_GLES20_glVertexAttribPointer
(JNIEnv *env, jclass c, jint index, jint size, jint type, jboolean normalized, jint stride, jint offset)
{
    glVertexAttribPointer(index, size, type, normalized, stride, (const void *) (intptr_t) offset);
}
/*
 * JNI binding for glDrawElements that takes the index-array argument as a
 * jint byte offset (used with a bound element array buffer).
 *
 * As above, widen through intptr_t so the integer-to-pointer conversion is
 * well-defined on 64-bit ABIs.
 */
void Java_fix_android_opengl_GLES20_glDrawElements
(JNIEnv *env, jclass c, jint mode, jint count, jint type, jint offset)
{
    glDrawElements(mode, count, type, (const void *) (intptr_t) offset);
}
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeWidth="1"
android:strokeColor="#00000000">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#008577"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:orientation="vertical"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="horizontal">
<Button
android:id="@+id/btnIncR"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="+" />
<Button
android:id="@+id/btnDecR"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="-" />
<TextView
android:id="@+id/txtR"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:ems="10"
android:text="r" />
</LinearLayout>
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="horizontal">
<Button
android:id="@+id/btnIncTheta"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="+" />
<Button
android:id="@+id/btnDecTheta"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="-" />
<TextView
android:id="@+id/txtTheta"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:ems="10"
android:text="theta" />
</LinearLayout>
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="horizontal">
<Button
android:id="@+id/btnIncRho"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="+" />
<Button
android:id="@+id/btnDecRho"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="-" />
<TextView
android:id="@+id/txtRho"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:ems="10"
android:text="rho" />
</LinearLayout>
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color>
<color name="colorAccent">#D81B60</color>
</resources>
<resources>
<string name="app_name">eeloo</string>
</resources>
<resources>
<!-- Base application theme. -->
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/colorPrimary</item>
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="colorAccent">@color/colorAccent</item>
</style>
</resources>
package com.kerbol.eeloo;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {

    /** Sanity check: basic integer arithmetic behaves as expected on the host JVM. */
    @Test
    public void addition_isCorrect() {
        final int expected = 4;
        final int actual = 2 + 2;
        assertEquals(expected, actual);
    }
}
\ No newline at end of file
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
    repositories {
        // Repositories used to resolve the build plugins themselves.
        google()
        jcenter()
    }
    dependencies {
        // Android Gradle plugin that drives the app module's build.
        classpath 'com.android.tools.build:gradle:3.4.0'
        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}
// Repositories used to resolve dependencies of every module in this project.
allprojects {
    repositories {
        google()
        jcenter()
    }
}
// `gradlew clean` deletes the root build output directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
#Fri May 03 17:09:08 KST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        # Link target is absolute: follow it directly.
        PRG="$link"
    else
        # Link target is relative: resolve it against the link's directory.
        PRG=`dirname "$PRG"`"/$link"
    fi
done
# APP_HOME becomes the physical (symlink-free) directory containing this
# script; the caller's working directory is restored afterwards.
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
# Print a diagnostic message (all arguments joined into one line).
warn () {
    echo "$*"
}
# Print a message framed by blank lines and abort with exit status 1.
die () {
    echo
    echo "$*"
    echo
    exit 1
}
# OS specific support (must be 'true' or 'false').
# These flags are consumed later: cygwin/msys adjust path handling, darwin
# adds dock options, and all of them gate the ulimit tweak below.
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac
# The wrapper jar bootstraps the real Gradle distribution.
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    # No JAVA_HOME: fall back to whatever 'java' resolves on the PATH.
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
# Skipped on Cygwin/Darwin/NonStop where ulimit behaves differently.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            # Default: raise the soft limit all the way to the hard limit.
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`
    # We build the pattern for arguments to be converted via cygpath
    # (an alternation of every root directory, so absolute Unix paths in
    # the argument list can be recognised below).
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    # (no arrays in /bin/sh, so each converted arg is stored in argsN via eval).
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    # Re-install the (possibly converted) arguments as the positional
    # parameters; the explicit case ladder caps support at 9 arguments.
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi
# Escape application args
# save: re-quote every argument in single quotes (escaping embedded quotes)
# with a trailing " \" continuation, so the result can be spliced back into
# the positional parameters via 'eval set --' below without word splitting.
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
    cd "$(dirname "$0")"
fi
# Replace this shell with the JVM running the Gradle wrapper main class.
exec "$JAVACMD" "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem DIRNAME is the directory containing this script; APP_HOME mirrors it.
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
@rem No JAVA_HOME set: probe for java.exe on the PATH.
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
@rem Strip surrounding quotes from JAVA_HOME, then derive the java.exe path.
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
// Modules that are part of this Gradle build.
include ':app'
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment