Customizing Media Encoder Standard presets

Overview

This article shows how to perform advanced encoding with Media Encoder Standard (MES) by using a custom preset. It uses .NET to create an encoding task and a job that runs the task.

The example customizes a preset by taking the H264 Multiple Bitrate 720p preset and reducing the number of layers. The related Customizing Media Encoder Standard presets article demonstrates custom presets that perform other advanced encoding tasks.

Note

The custom presets described in this article cannot be used with Media Services v3 transforms or the CLI commands. For more information, see the migration guidance from v2 to v3.

Customizing a MES preset

Original preset

Save the JSON defined in the H264 Multiple Bitrate 720p article to a file with a .json extension, for example, CustomPreset_JSON.json.

Customized preset

Open the CustomPreset_JSON.json file and remove the first three layers from H264Layers so that your file looks like the following:

  {
    "Version": 1.0,
    "Codecs": [
      {
        "KeyFrameInterval": "00:00:02",
        "H264Layers": [
          {
            "Profile": "Auto",
            "Level": "auto",
            "Bitrate": 1000,
            "MaxBitrate": 1000,
            "BufferWindow": "00:00:05",
            "Width": 640,
            "Height": 360,
            "BFrames": 3,
            "ReferenceFrames": 3,
            "AdaptiveBFrame": true,
            "Type": "H264Layer",
            "FrameRate": "0/1"
          },
          {
            "Profile": "Auto",
            "Level": "auto",
            "Bitrate": 650,
            "MaxBitrate": 650,
            "BufferWindow": "00:00:05",
            "Width": 640,
            "Height": 360,
            "BFrames": 3,
            "ReferenceFrames": 3,
            "AdaptiveBFrame": true,
            "Type": "H264Layer",
            "FrameRate": "0/1"
          },
          {
            "Profile": "Auto",
            "Level": "auto",
            "Bitrate": 400,
            "MaxBitrate": 400,
            "BufferWindow": "00:00:05",
            "Width": 320,
            "Height": 180,
            "BFrames": 3,
            "ReferenceFrames": 3,
            "AdaptiveBFrame": true,
            "Type": "H264Layer",
            "FrameRate": "0/1"
          }
        ],
        "Type": "H264Video"
      },
      {
        "Profile": "AACLC",
        "Channels": 2,
        "SamplingRate": 48000,
        "Bitrate": 128,
        "Type": "AACAudio"
      }
    ],
    "Outputs": [
      {
        "FileName": "{Basename}_{Width}x{Height}_{VideoBitrate}.mp4",
        "Format": {
          "Type": "MP4Format"
        }
      }
    ]
  }

Encoding with Media Services .NET SDK

The following code example uses the Media Services .NET SDK to perform these tasks:

  • Create an encoding job.

  • Get a reference to the Media Encoder Standard encoder.

  • Load the custom JSON preset that you created in the previous section.

    // Load the JSON from the local file.
    string configuration = File.ReadAllText(fileName);
    
  • Add an encoding task to the job.

  • Specify the input asset to be encoded.

  • Create an output asset to contain the encoded output.

  • Add an event handler to check the job progress.

  • Submit the job.

Create and configure a Visual Studio project

Set up your development environment and populate the app.config file with connection information, as described in Media Services development with .NET.
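The sample reads its Azure AD and account settings from the appSettings section of app.config. As a minimal sketch, the key names below match the ones the code reads; the values shown are placeholders that you replace with your own tenant, REST API endpoint, and service principal credentials:

    <configuration>
      <appSettings>
        <!-- Placeholder values: replace with your own Azure AD tenant, Media Services
             REST API endpoint, and service principal (client) ID and secret. -->
        <add key="AMSAADTenantDomain" value="tenant.onmicrosoft.com" />
        <add key="AMSRESTAPIEndpoint" value="https://REPLACE-WITH-YOUR-ENDPOINT/api/" />
        <add key="AMSClientId" value="00000000-0000-0000-0000-000000000000" />
        <add key="AMSClientSecret" value="REPLACE-WITH-YOUR-CLIENT-SECRET" />
      </appSettings>
    </configuration>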

Example

using System;
using System.Configuration;
using System.IO;
using System.Linq;
using Microsoft.WindowsAzure.MediaServices.Client;
using System.Threading;

namespace CustomizeMESPresets
{
    class Program
    {
        // Read values from the App.config file.
        private static readonly string _AADTenantDomain =
            ConfigurationManager.AppSettings["AMSAADTenantDomain"];
        private static readonly string _RESTAPIEndpoint =
            ConfigurationManager.AppSettings["AMSRESTAPIEndpoint"];
        private static readonly string _AMSClientId =
            ConfigurationManager.AppSettings["AMSClientId"];
        private static readonly string _AMSClientSecret =
            ConfigurationManager.AppSettings["AMSClientSecret"];

        // Field for service context.
        private static CloudMediaContext _context = null;

        private static readonly string _mediaFiles =
            Path.GetFullPath(@"../..\Media");

        private static readonly string _singleMP4File =
            Path.Combine(_mediaFiles, @"BigBuckBunny.mp4");

        static void Main(string[] args)
        {
            AzureAdTokenCredentials tokenCredentials =
                new AzureAdTokenCredentials(_AADTenantDomain,
                    new AzureAdClientSymmetricKey(_AMSClientId, _AMSClientSecret),
                    AzureEnvironments.AzureCloudEnvironment);

            var tokenProvider = new AzureAdTokenProvider(tokenCredentials);

            _context = new CloudMediaContext(new Uri(_RESTAPIEndpoint), tokenProvider);

            // Get a reference to a previously uploaded asset (this sample uses the first asset in the account).
            var asset = _context.Assets.FirstOrDefault();

            // Encode and generate the output using custom presets.
            EncodeToAdaptiveBitrateMP4Set(asset);

            Console.ReadLine();
        }

        static public IAsset EncodeToAdaptiveBitrateMP4Set(IAsset asset)
        {
            // Declare a new job.
            IJob job = _context.Jobs.Create("Media Encoder Standard Job");
            // Get a media processor reference, and pass to it the name of the
            // processor to use for the specific task.
            IMediaProcessor processor = GetLatestMediaProcessorByName("Media Encoder Standard");

            // Load the custom JSON preset from the local file.
            string configuration = File.ReadAllText("CustomPreset_JSON.json");

            // Create a task, using the custom preset as the encoding configuration.
            ITask task = job.Tasks.AddNew("Media Encoder Standard encoding task",
                processor,
                configuration,
                TaskOptions.None);

            // Specify the input asset to be encoded.
            task.InputAssets.Add(asset);
            // Add an output asset to contain the results of the job.
            // This output is specified as AssetCreationOptions.None, which
            // means the output asset is not encrypted.
            task.OutputAssets.AddNew("Output asset",
                AssetCreationOptions.None);

            job.StateChanged += new EventHandler<JobStateChangedEventArgs>(JobStateChanged);
            job.Submit();
            job.GetExecutionProgressTask(CancellationToken.None).Wait();

            return job.OutputMediaAssets[0];
        }

        private static void JobStateChanged(object sender, JobStateChangedEventArgs e)
        {
            Console.WriteLine("Job state changed event:");
            Console.WriteLine("  Previous state: " + e.PreviousState);
            Console.WriteLine("  Current state: " + e.CurrentState);
            switch (e.CurrentState)
            {
                case JobState.Finished:
                    Console.WriteLine();
                    Console.WriteLine("Job is finished. Please wait while local tasks or downloads complete...");
                    break;
                case JobState.Canceling:
                case JobState.Queued:
                case JobState.Scheduled:
                case JobState.Processing:
                    Console.WriteLine("Please wait...\n");
                    break;
                case JobState.Canceled:
                case JobState.Error:

                    // Cast sender as a job.
                    IJob job = (IJob)sender;

                    // Display or log error details as needed.
                    break;
                default:
                    break;
            }
        }

        private static IMediaProcessor GetLatestMediaProcessorByName(string mediaProcessorName)
        {
            var processor = _context.MediaProcessors.Where(p => p.Name == mediaProcessorName)
                .ToList()
                .OrderBy(p => new Version(p.Version))
                .LastOrDefault();

            if (processor == null)
                throw new ArgumentException(string.Format("Unknown media processor: {0}", mediaProcessorName));

            return processor;
        }

    }
}
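
When the job finishes, the output asset returned by EncodeToAdaptiveBitrateMP4Set contains one MP4 file per remaining layer, named according to the {Basename}_{Width}x{Height}_{VideoBitrate}.mp4 pattern from the preset, plus any metadata files produced by the encoder. As a minimal sketch (assuming you keep the returned IAsset and that the destination folder already exists), you could download the results with a hypothetical helper like the following:

    // Download every file in the encoded output asset to a local folder.
    // "outputAsset" is the IAsset returned by EncodeToAdaptiveBitrateMP4Set;
    // "outputFolder" is an existing local directory.
    static void DownloadAssetFiles(IAsset outputAsset, string outputFolder)
    {
        foreach (IAssetFile file in outputAsset.AssetFiles)
        {
            Console.WriteLine("Downloading {0}...", file.Name);
            file.Download(Path.Combine(outputFolder, file.Name));
        }
    }

For example, in Main you could capture the return value of EncodeToAdaptiveBitrateMP4Set and then call DownloadAssetFiles(outputAsset, @"C:\output") to copy the encoded MP4 files to your machine.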