--- libgig/trunk/src/gig.cpp	2006/10/29 17:57:20	930
+++ libgig/trunk/src/gig.cpp	2007/04/13 16:41:18	1158
@@ -1,8 +1,8 @@
 /***************************************************************************
  *                                                                         *
- *   libgig - C++ cross-platform Gigasampler format file loader library   *
+ *   libgig - C++ cross-platform Gigasampler format file access library   *
  *                                                                         *
- *   Copyright (C) 2003-2006 by Christian Schoenebeck                      *
+ *   Copyright (C) 2003-2007 by Christian Schoenebeck                      *
  *                                                                         *
  *                                                                         *
  *   This library is free software; you can redistribute it and/or modify *
@@ -254,6 +254,30 @@
 }
 
 
+
+// *************** Other Internal functions ***************
+// *
+
+    static split_type_t __resolveSplitType(dimension_t dimension) {
+        return (
+            dimension == dimension_layer ||
+            dimension == dimension_samplechannel ||
+            dimension == dimension_releasetrigger ||
+            dimension == dimension_keyboard ||
+            dimension == dimension_roundrobin ||
+            dimension == dimension_random ||
+            dimension == dimension_smartmidi ||
+            dimension == dimension_roundrobinkeyboard
+        ) ? split_type_bit : split_type_normal;
+    }
+
+    static int __resolveZoneSize(dimension_def_t& dimension_definition) {
+        return (dimension_definition.split_type == split_type_normal)
+            ? int(128.0 / dimension_definition.zones) : 0;
+    }
+
+
+
 
 // *************** Sample ***************
 // *
@@ -364,7 +388,7 @@
      * Usually there is absolutely no need to call this method explicitly.
      * It will be called automatically when File::Save() was called.
      *
-     * @throws DLS::Exception if FormatTag != WAVE_FORMAT_PCM or no sample data
+     * @throws DLS::Exception if FormatTag != DLS_WAVE_FORMAT_PCM or no sample data
      *                        was provided yet
     * @throws gig::Exception if there is any invalid sample setting
      */
@@ -635,13 +659,13 @@
      * enlarged samples before calling File::Save() as this might exceed the
      * current sample's boundary!
      *
-     * Also note: only WAVE_FORMAT_PCM is currently supported, that is
-     * FormatTag must be WAVE_FORMAT_PCM. Trying to resize samples with
+     * Also note: only DLS_WAVE_FORMAT_PCM is currently supported, that is
+     * FormatTag must be DLS_WAVE_FORMAT_PCM. Trying to resize samples with
      * other formats will fail!
      *
      * @param iNewSize - new sample wave data size in sample points (must be
      *                   greater than zero)
-     * @throws DLS::Excecption if FormatTag != WAVE_FORMAT_PCM
+     * @throws DLS::Excecption if FormatTag != DLS_WAVE_FORMAT_PCM
      *                         or if \a iNewSize is less than 1
      * @throws gig::Exception if existing sample is compressed
      * @see DLS::Sample::GetSize(), DLS::Sample::FrameSize,
@@ -1338,6 +1362,11 @@
                 if (lfo3ctrl & 0x40) // bit 6
                     VCFType = vcf_type_lowpassturbo;
             }
+            if (_3ewa->RemainingBytes() >= 8) {
+                _3ewa->Read(DimensionUpperLimits, 1, 8);
+            } else {
+                memset(DimensionUpperLimits, 0, 8);
+            }
         } else { // '3ewa' chunk does not exist yet
             // use default values
             LFO3Frequency = 1.0;
@@ -1418,6 +1447,7 @@
             VCFVelocityDynamicRange = 0x04;
             VCFVelocityCurve = curve_type_linear;
             VCFType = vcf_type_lowpass;
+            memset(DimensionUpperLimits, 0, 8);
         }
 
         pVelocityAttenuationTable = GetVelocityTable(VelocityResponseCurve,
@@ -1473,8 +1503,8 @@
 
         // update '3ewa' chunk with DimensionRegion's current settings
 
-        const uint32_t unknown = _3ewa->GetSize(); // unknown, always chunk size ?
-        memcpy(&pData[0], &unknown, 4);
+        const uint32_t chunksize = _3ewa->GetSize();
+        memcpy(&pData[0], &chunksize, 4); // unknown, always chunk size?
 
         const int32_t lfo3freq = (int32_t) GIG_EXP_ENCODE(LFO3Frequency);
         memcpy(&pData[4], &lfo3freq, 4);
@@ -1729,6 +1759,10 @@
         const uint8_t vcftype = (VCFType == vcf_type_lowpassturbo) ?
                                     vcf_type_lowpass : VCFType;
         memcpy(&pData[139], &vcftype, 1);
+
+        if (chunksize >= 148) {
+            memcpy(&pData[140], DimensionUpperLimits, 8);
+        }
     }
 
     // get the corresponding velocity table from the table map or create & calculate that table if it doesn't exist yet
@@ -2105,16 +2139,8 @@
             pDimensionDefinitions[i].dimension = dimension;
             pDimensionDefinitions[i].bits = bits;
             pDimensionDefinitions[i].zones = zones ? zones : 0x01 << bits; // = pow(2,bits)
-            pDimensionDefinitions[i].split_type = (dimension == dimension_layer ||
-                                                   dimension == dimension_samplechannel ||
-                                                   dimension == dimension_releasetrigger ||
-                                                   dimension == dimension_keyboard ||
-                                                   dimension == dimension_roundrobin ||
-                                                   dimension == dimension_random) ? split_type_bit
-                                                                                  : split_type_normal;
-            pDimensionDefinitions[i].zone_size =
-                (pDimensionDefinitions[i].split_type == split_type_normal) ? 128.0 / pDimensionDefinitions[i].zones
-                                                                           : 0;
+            pDimensionDefinitions[i].split_type = __resolveSplitType(dimension);
+            pDimensionDefinitions[i].zone_size = __resolveZoneSize(pDimensionDefinitions[i]);
             Dimensions++;
 
             // if this is a layer dimension, remember the amount of layers
@@ -2140,6 +2166,8 @@
                 if (file->pWavePoolTable) pDimensionRegions[i]->pSample = GetSampleFromWavePool(wavepoolindex);
             }
             GetSample(); // load global region sample reference
+        } else {
+            DimensionRegions = 0;
         }
 
         // make sure there is at least one dimension region
@@ -2162,6 +2190,12 @@
      * @throws gig::Exception if samples cannot be dereferenced
      */
     void Region::UpdateChunks() {
+        // in the gig format we don't care about the Region's sample reference
+        // but we still have to provide some existing one to not corrupt the
+        // file, so to avoid the latter we simply always assign the sample of
+        // the first dimension region of this region
+        pSample = pDimensionRegions[0]->pSample;
+
         // first update base class's chunks
         DLS::Region::UpdateChunks();
 
@@ -2248,7 +2282,8 @@
             int dim[8] = { 0 };
             for (int i = 0 ; i < DimensionRegions ; i++) {
 
-                if (pDimensionRegions[i]->VelocityUpperLimit) {
+                if (pDimensionRegions[i]->DimensionUpperLimits[veldim] ||
+                    pDimensionRegions[i]->VelocityUpperLimit) {
                     // create the velocity table
                     uint8_t* table = pDimensionRegions[i]->VelocityTable;
                     if (!table) {
@@ -2257,10 +2292,18 @@
                     }
                     int tableidx = 0;
                     int velocityZone = 0;
-                    for (int k = i ; k < end ; k += step) {
-                        DimensionRegion *d = pDimensionRegions[k];
-                        for (; tableidx <= d->VelocityUpperLimit ; tableidx++) table[tableidx] = velocityZone;
-                        velocityZone++;
+                    if (pDimensionRegions[i]->DimensionUpperLimits[veldim]) { // gig3
+                        for (int k = i ; k < end ; k += step) {
+                            DimensionRegion *d = pDimensionRegions[k];
+                            for (; tableidx <= d->DimensionUpperLimits[veldim] ; tableidx++) table[tableidx] = velocityZone;
+                            velocityZone++;
+                        }
+                    } else { // gig2
+                        for (int k = i ; k < end ; k += step) {
+                            DimensionRegion *d = pDimensionRegions[k];
+                            for (; tableidx <= d->VelocityUpperLimit ; tableidx++) table[tableidx] = velocityZone;
+                            velocityZone++;
+                        }
                     }
                 } else {
                     if (pDimensionRegions[i]->VelocityTable) {
@@ -2327,6 +2370,12 @@
         // assign definition of new dimension
         pDimensionDefinitions[Dimensions] = *pDimDef;
 
+        // auto correct certain dimension definition fields (where possible)
+        pDimensionDefinitions[Dimensions].split_type =
+            __resolveSplitType(pDimensionDefinitions[Dimensions].dimension);
+        pDimensionDefinitions[Dimensions].zone_size =
+            __resolveZoneSize(pDimensionDefinitions[Dimensions]);
+
         // create new dimension region(s) for this new dimension
         for (int i = 1 << iCurrentBits; i < 1 << iNewBits; i++) {
             //TODO: maybe we should copy existing dimension regions if possible instead of simply creating new ones with default values
@@ -2455,7 +2504,15 @@
             } else {
                 switch (pDimensionDefinitions[i].split_type) {
                     case split_type_normal:
-                        bits = uint8_t(DimValues[i] / pDimensionDefinitions[i].zone_size);
+                        if (pDimensionRegions[0]->DimensionUpperLimits[i]) {
+                            // gig3: all normal dimensions (not just the velocity dimension) have custom zone ranges
+                            for (bits = 0 ; bits < pDimensionDefinitions[i].zones ; bits++) {
+                                if (DimValues[i] <= pDimensionRegions[bits << bitpos]->DimensionUpperLimits[i]) break;
+                            }
+                        } else {
+                            // gig2: evenly sized zones
+                            bits = uint8_t(DimValues[i] / pDimensionDefinitions[i].zone_size);
+                        }
                         break;
                     case split_type_bit: // the value is already the sought dimension bit number
                         const uint8_t limiter_mask = (0xff << pDimensionDefinitions[i].bits) ^ 0xff;
@@ -2469,7 +2526,7 @@
         DimensionRegion* dimreg = pDimensionRegions[dimregidx];
         if (veldim != -1) {
             // (dimreg is now the dimension region for the lowest velocity)
-            if (dimreg->VelocityUpperLimit) // custom defined zone ranges
+            if (dimreg->VelocityTable) // custom defined zone ranges
                 bits = dimreg->VelocityTable[DimValues[veldim]];
             else // normal split type
                 bits = uint8_t(DimValues[veldim] / pDimensionDefinitions[veldim].zone_size);
@@ -2711,12 +2768,17 @@
     }
 
     Group::~Group() {
+        // remove the chunk associated with this group (if any)
+        if (pNameChunk) pNameChunk->GetParent()->DeleteSubChunk(pNameChunk);
     }
 
     /** @brief Update chunks with current group settings.
      *
-     * Apply current Group field values to the respective. You have to call
-     * File::Save() to make changes persistent.
+     * Apply current Group field values to the respective chunks. You have
+     * to call File::Save() to make changes persistent.
+     *
+     * Usually there is absolutely no need to call this method explicitly.
+     * It will be called automatically when File::Save() was called.
      */
     void Group::UpdateChunks() {
         // make sure <3gri> and <3gnl> list chunks exist
@@ -2864,6 +2926,7 @@
         if (!pSamples || !pSamples->size()) throw gig::Exception("Could not delete sample as there are no samples");
         SampleList::iterator iter = find(pSamples->begin(), pSamples->end(), (DLS::Sample*) pSample);
         if (iter == pSamples->end()) throw gig::Exception("Could not delete sample, could not find given sample");
+        if (SamplesIterator != pSamples->end() && *SamplesIterator == pSample) ++SamplesIterator; // avoid iterator invalidation
         pSamples->erase(iter);
         delete pSample;
     }
@@ -2875,7 +2938,7 @@
     void File::LoadSamples(progress_t* pProgress) {
         // Groups must be loaded before samples, because samples will try
         // to resolve the group they belong to
-        LoadGroups();
+        if (!pGroups) LoadGroups();
 
         if (!pSamples) pSamples = new SampleList;
 
@@ -3000,7 +3063,7 @@
      * have to call Save() to make this persistent to the file.
      *
      * @param pInstrument - instrument to delete
-     * @throws gig::Excption if given instrument could not be found
+     * @throws gig::Exception if given instrument could not be found
      */
     void File::DeleteInstrument(Instrument* pInstrument) {
         if (!pInstruments) throw gig::Exception("Could not delete instrument as there are no instruments");
@@ -3078,11 +3141,44 @@
         return pGroup;
     }
 
+    /** @brief Delete a group and its samples.
+     *
+     * This will delete the given Group object and all the samples that
+     * belong to this group from the gig file. You have to call Save() to
+     * make this persistent to the file.
+     *
+     * @param pGroup - group to delete
+     * @throws gig::Exception if given group could not be found
+     */
     void File::DeleteGroup(Group* pGroup) {
         if (!pGroups) LoadGroups();
         std::list<Group*>::iterator iter = find(pGroups->begin(), pGroups->end(), pGroup);
         if (iter == pGroups->end()) throw gig::Exception("Could not delete group, could not find given group");
         if (pGroups->size() == 1) throw gig::Exception("Cannot delete group, there must be at least one default group!");
+        // delete all members of this group
+        for (Sample* pSample = pGroup->GetFirstSample(); pSample; pSample = pGroup->GetNextSample()) {
+            DeleteSample(pSample);
+        }
+        // now delete this group object
+        pGroups->erase(iter);
+        delete pGroup;
+    }
+
+    /** @brief Delete a group.
+     *
+     * This will delete the given Group object from the gig file. All the
+     * samples that belong to this group will not be deleted, but instead
+     * be moved to another group. You have to call Save() to make this
+     * persistent to the file.
+     *
+     * @param pGroup - group to delete
+     * @throws gig::Exception if given group could not be found
+     */
+    void File::DeleteGroupOnly(Group* pGroup) {
+        if (!pGroups) LoadGroups();
+        std::list<Group*>::iterator iter = find(pGroups->begin(), pGroups->end(), pGroup);
+        if (iter == pGroups->end()) throw gig::Exception("Could not delete group, could not find given group");
+        if (pGroups->size() == 1) throw gig::Exception("Cannot delete group, there must be at least one default group!");
         // move all members of this group to another group
         pGroup->MoveAll();
         pGroups->erase(iter);
@@ -3113,6 +3209,30 @@
         }
     }
 
+    /**
+     * Apply all the gig file's current instruments, samples, groups and settings
+     * to the respective RIFF chunks. You have to call Save() to make changes
+     * persistent.
+     *
+     * Usually there is absolutely no need to call this method explicitly.
+     * It will be called automatically when File::Save() was called.
+     *
+     * @throws Exception - on errors
+     */
+    void File::UpdateChunks() {
+        // first update base class's chunks
+        DLS::File::UpdateChunks();
+
+        // update group's chunks
+        if (pGroups) {
+            std::list<Group*>::iterator iter = pGroups->begin();
+            std::list<Group*>::iterator end = pGroups->end();
+            for (; iter != end; ++iter) {
+                (*iter)->UpdateChunks();
+            }
+        }
+    }
+
 
 
 // *************** Exception ***************
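
The following standalone sketch (not part of the patch) illustrates the two zone lookup schemes handled by the hunks above: gig v2 files use evenly sized zones (128 divided by the number of zones, as in __resolveZoneSize()), while gig v3 files store per-zone upper limits (DimensionUpperLimits). The limit values and variable names below are made up for illustration only.

#include <cstdint>
#include <cstdio>

int main() {
    const int     zones          = 4;                    // e.g. a 2-bit velocity dimension
    const int     zoneSize       = 128 / zones;          // gig2 scheme: evenly sized zones -> 32
    const uint8_t upperLimits[4] = { 40, 80, 100, 127 }; // gig3 scheme: custom zone ranges (made up)
    const uint8_t value          = 70;                   // incoming dimension value (0..127)

    // gig2: zone index by plain division
    int gig2Zone = value / zoneSize;                     // 70 / 32 = 2

    // gig3: first zone whose upper limit covers the value
    int gig3Zone = 0;
    while (gig3Zone < zones - 1 && value > upperLimits[gig3Zone]) gig3Zone++;

    printf("gig2 zone: %d, gig3 zone: %d\n", gig2Zone, gig3Zone); // prints "gig2 zone: 2, gig3 zone: 1"
    return 0;
}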
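
Likewise not part of the patch, here is a minimal usage sketch of the group deletion API introduced above. The file name "example.gig" is hypothetical; the sketch only relies on public libgig calls shown or referenced in this diff (RIFF::File, gig::File, GetFirstGroup()/GetNextGroup(), DeleteGroupOnly(), Save()).

#include <gig.h>
#include <iostream>

int main() {
    try {
        RIFF::File riff("example.gig"); // hypothetical input file
        gig::File  file(&riff);

        // take the second group; the first one is the default group,
        // which must not be deleted
        gig::Group* pGroup = file.GetFirstGroup();
        if (pGroup) pGroup = file.GetNextGroup();

        if (pGroup) {
            // DeleteGroupOnly() removes just the group and moves its samples
            // to another group, whereas DeleteGroup() would also delete all
            // samples belonging to that group
            file.DeleteGroupOnly(pGroup);
        }

        file.Save(); // make the change persistent (triggers UpdateChunks())
    } catch (RIFF::Exception e) {
        std::cerr << e.Message << std::endl;
        return 1;
    }
    return 0;
}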