In order to use this API, you will need the following files:
UBUNTU
Location : API -> Ubuntu -> 32Bits/64Bits
WINDOWS
Location : API -> Windows -> Cpp
All the examples are included in the software package: from the package's root, they are in the folder Documentation, sub-folder API Examples.
Most of the time, this happens under Ubuntu, and it usually means that you don't have the right privileges on the USB device (in our case, the robotic arm). The device file should be in /dev/bus/usb/???/???. You can type the command lsusb to find out where the device file is. Look for a device with vendor ID 22CD.
It is done by calling 2 functions: MoveHome and InitFingers. Call them in that order and your robotic arm will be ready to move and grab.
The Cartesian position and orientation returned by the API's functions are relative to the base reference frame. The origin is at the center and bottom of the base. The Z axis points upward, the X axis points to the left when facing the connectors and the Y axis is toward the connectors (See the DH Parameters document).
The fixed frame is the classic robot motion: when you move in translation, the orientation remains constant. The rotating frame is used by default. It is used in rehabilitation to obtain more intuitive motions: when you perform translations, the orientation changes to be more human-like.
Convention XYZ
There are 4 modes of control available with the API: Cartesian position control, Cartesian velocity control, angular position control and angular velocity control.
Have a look at the example here: SetProtectionZone
Have a look at the example here: StartForceControl
#include <stdio.h> #include "Kinova.API.UsbCommandLayerUbuntu.h" #include "KinovaTypes.h" #include <dlfcn.h> int (*MyGetClientConfigurations)(ClientConfigurations &); int (*MyInitAPI)(); int (*MyCloseAPI)(); int main() { void* commandLayer_handle; commandLayer_handle = dlopen("Kinova.API.USBCommandLayerUbuntu.so",RTLD_NOW|RTLD_GLOBAL); if(commandLayer_handle == NULL) { printf("Error while loading library."); } else { printf("API loaded Successfully\n"); MyInitAPI = (int (*)()) dlsym(commandLayer_handle,"InitAPI"); if(MyInitAPI == NULL) { printf("InitAPI loading error\n"); } else { printf("InitAPI loaded Successfully\n"); } MyCloseAPI = (int (*)()) dlsym(commandLayer_handle,"CloseAPI"); if(MyCloseAPI == NULL) { printf("CloseAPI loading error\n"); } else { printf("CloseAPI loaded Successfully\n"); } MyGetClientConfigurations = (int (*)(ClientConfigurations &)) dlsym(commandLayer_handle,"GetClientConfigurations"); if(MyGetClientConfigurations == NULL) { printf("GetClientConfigurations loading error\n"); } else { printf("GetClientConfigurations loaded Successfully\n"); } } ClientConfigurations data; printf("Calling GetClientConfig\n"); (*MyInitAPI)(); (*MyGetClientConfigurations)(data); (*MyCloseAPI)(); printf("Cartesian velocity = %f", data.MaxTranslationVelocity); return 0; }
Kinova.API.UsbCommandLayer.h
KinovaTypes.h
Kinova.API.CommLayer.dll
Kinova.API.UsbCommandLayer.dll
mpusbapi.dll
Kinova.API.CommLayer.dll
Kinova.API.UsbCommandLayer.dll
mpusbapi.dll
#pragma once #include "targetver.h" #include <tchar.h> #include <Windows.h>
// Example: move the robot. Make sure that it can move freely without
// obstacles. First the robotic arm is homed and, if needed, the fingers are
// initialised. Then the robot moves a few cm toward the front and raises
// about 11 cm.
#include "stdafx.h"
#include "Kinova.API.UsbCommandLayer.h"
#include <conio.h>
#include "KinovaTypes.h"
#include <iostream>

using namespace std;

// A handle to the API.
HINSTANCE commandLayer_handle;

// Function pointers resolved from the command layer DLL at runtime.
int (*MyInitAPI)();
int (*MyCloseAPI)();
int (*MyMoveHome)();
int (*MyInitFingers)();
int (*MySendAdvanceTrajectory)(TrajectoryPoint command);
int (*MyGetQuickStatus)(QuickStatus &);

int _tmain(int argc, _TCHAR* argv[])
{
    // We load the API.
    commandLayer_handle = LoadLibrary(L"Kinova.API.UsbCommandLayer.dll");

    if (commandLayer_handle == NULL)
    {
        // BUG FIX: the original example only printed an error here and then
        // went on to call GetProcAddress with a NULL module handle.
        cout << "Error while loading library. Cannot perform test. Leaving..." << endl;
        _getch();
        return 1;
    }
    cout << "API loaded Successfully" << endl;

    // Initialise the function pointers from the API.
    MyInitAPI = (int (*)()) GetProcAddress(commandLayer_handle, "InitAPI");
    MyCloseAPI = (int (*)()) GetProcAddress(commandLayer_handle, "CloseAPI");
    MySendAdvanceTrajectory = (int (*)(TrajectoryPoint)) GetProcAddress(commandLayer_handle, "SendAdvanceTrajectory");
    MyMoveHome = (int (*)()) GetProcAddress(commandLayer_handle, "MoveHome");
    MyInitFingers = (int (*)()) GetProcAddress(commandLayer_handle, "InitFingers");
    MyGetQuickStatus = (int (*)(QuickStatus &)) GetProcAddress(commandLayer_handle, "GetQuickStatus");

    // Verify that all functions have been loaded correctly.
    if ((MyInitAPI == NULL) || (MyCloseAPI == NULL) || (MySendAdvanceTrajectory == NULL) ||
        (MyMoveHome == NULL) || (MyGetQuickStatus == NULL) || (MyInitFingers == NULL))
    {
        cout << "Cannot load all the functions from the API." << endl;
        // BUG FIX: release the library before leaving (the original leaked it
        // on this path because FreeLibrary sat inside the success branch).
        FreeLibrary(commandLayer_handle);
        _getch();
        return 1;
    }

    int result = (*MyInitAPI)();
    cout << "The result of InitAPI = " << result << endl;

    // We get some quick information about the robotic arm, called quickstatus.
    // We will use that to check if the robotic arm is a MICO or a JACO.
    QuickStatus status;
    (*MyGetQuickStatus)(status);

    // We tell the robotic arm to go to the READY position.
    (*MyMoveHome)();

    // We initialise the fingers if they need to be initialised.
    (*MyInitFingers)();

    // This is the point that will be sent to the robot.
    TrajectoryPoint pointToSend;
    pointToSend.InitStruct();
    pointToSend.Position.HandMode = POSITION_MODE;
    pointToSend.Position.Type = CARTESIAN_POSITION;
    pointToSend.LimitationsActive = 1;

    // Note that the first position has a velocity limitation of 8 cm/sec.
    pointToSend.Limitations.speedParameter1 = 0.08;
    pointToSend.Limitations.speedParameter2 = 0.7;

    pointToSend.Position.CartesianPosition.X = 0.21f;
    pointToSend.Position.CartesianPosition.Y = -0.35f;
    pointToSend.Position.CartesianPosition.Z = 0.48f;
    pointToSend.Position.CartesianPosition.ThetaX = 1.55f;
    pointToSend.Position.CartesianPosition.ThetaY = 1.02f;
    pointToSend.Position.CartesianPosition.ThetaZ = -0.03f;

    // If the robotic arm is a JACO, we use those values for the fingers.
    if (status.RobotType == 0)
    {
        pointToSend.Position.Fingers.Finger1 = 45.0f;
        pointToSend.Position.Fingers.Finger2 = 45.0f;
        pointToSend.Position.Fingers.Finger3 = 45.0f;
    }
    // If the robotic arm is a MICO, we use those values for the fingers.
    else if (status.RobotType == 1)
    {
        pointToSend.Position.Fingers.Finger1 = 4500.0f;
        pointToSend.Position.Fingers.Finger2 = 4500.0f;
        pointToSend.Position.Fingers.Finger3 = 4500.0f;
    }
    else
    {
        pointToSend.Position.Fingers.Finger1 = 0.0f;
        pointToSend.Position.Fingers.Finger2 = 0.0f;
        pointToSend.Position.Fingers.Finger3 = 0.0f;
    }

    cout << "Sending trajectory" << endl;
    (*MySendAdvanceTrajectory)(pointToSend);

    // Second point: raise the arm, with no speed limitation this time.
    pointToSend.LimitationsActive = 0;
    pointToSend.Position.CartesianPosition.Z = 0.59f;

    cout << "Sending trajectory" << endl;
    (*MySendAdvanceTrajectory)(pointToSend);

    cout << "End of example" << endl;
    (*MyCloseAPI)();

    // We free the memory of the library.
    FreeLibrary(commandLayer_handle);

    _getch();
    return 0;
}